#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_fixed.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_vblank.h>
#include <drm/radeon_drm.h>

#include "radeon.h"
#include "atom.h"
#include "atom-bits.h"

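/* Program overscan borders via the SetCRTC_OverScan table, based on the
 * CRTC's RMX (scaler) mode: centered, aspect-preserving, or full expansion.
 */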
static void atombios_overscan_setup(struct drm_crtc *crtc,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	SET_CRTC_OVERSCAN_PS_ALLOCATION args;
	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
	int a1, a2;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;

	switch (radeon_crtc->rmx_type) {
	case RMX_CENTER:
		args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
		args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
		args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
		args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
		break;
	case RMX_ASPECT:
		a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
		a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;

		if (a1 > a2) {
			args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
			args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
		} else if (a2 > a1) {
			args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
			args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
		}
		break;
	case RMX_FULL:
	default:
		args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border);
		args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border);
		args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border);
		args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border);
		break;
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

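/* Configure the CRTC scaler via the EnableScaler table.  TV/CV outputs get
 * multi-tap scaling with the appropriate TV standard; otherwise the scaler is
 * set according to the RMX mode (or disabled on AVIVO when unused).
 */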
static void atombios_scaler_setup(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	ENABLE_SCALER_PS_ALLOCATION args;
	int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
	struct radeon_encoder *radeon_encoder =
		to_radeon_encoder(radeon_crtc->encoder);

	enum radeon_tv_std tv_std = TV_STD_NTSC;
	bool is_tv = false, is_cv = false;

	if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
		return;

	if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
		struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
		tv_std = tv_dac->tv_std;
		is_tv = true;
	}

	memset(&args, 0, sizeof(args));

	args.ucScaler = radeon_crtc->crtc_id;

	if (is_tv) {
		switch (tv_std) {
		case TV_STD_NTSC:
		default:
			args.ucTVStandard = ATOM_TV_NTSC;
			break;
		case TV_STD_PAL:
			args.ucTVStandard = ATOM_TV_PAL;
			break;
		case TV_STD_PAL_M:
			args.ucTVStandard = ATOM_TV_PALM;
			break;
		case TV_STD_PAL_60:
			args.ucTVStandard = ATOM_TV_PAL60;
			break;
		case TV_STD_NTSC_J:
			args.ucTVStandard = ATOM_TV_NTSCJ;
			break;
		case TV_STD_SCART_PAL:
			args.ucTVStandard = ATOM_TV_PAL;
			break;
		case TV_STD_SECAM:
			args.ucTVStandard = ATOM_TV_SECAM;
			break;
		case TV_STD_PAL_CN:
			args.ucTVStandard = ATOM_TV_PALCN;
			break;
		}
		args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
	} else if (is_cv) {
		args.ucTVStandard = ATOM_TV_CV;
		args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
	} else {
		switch (radeon_crtc->rmx_type) {
		case RMX_FULL:
			args.ucEnable = ATOM_SCALER_EXPANSION;
			break;
		case RMX_CENTER:
			args.ucEnable = ATOM_SCALER_CENTER;
			break;
		case RMX_ASPECT:
			args.ucEnable = ATOM_SCALER_EXPANSION;
			break;
		default:
			if (ASIC_IS_AVIVO(rdev))
				args.ucEnable = ATOM_SCALER_DISABLE;
			else
				args.ucEnable = ATOM_SCALER_CENTER;
			break;
		}
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
	if ((is_tv || is_cv)
	    && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
		atom_rv515_force_tv_scaler(rdev, radeon_crtc);
	}
}

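/* Lock or unlock the CRTC's double-buffered registers so that a full set of
 * timing/surface updates can be latched atomically.
 */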
static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index =
		GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
	ENABLE_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucEnable = lock;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
	ENABLE_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucEnable = state;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
	ENABLE_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucEnable = state;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static const u32 vga_control_regs[6] =
{
	AVIVO_D1VGA_CONTROL,
	AVIVO_D2VGA_CONTROL,
	EVERGREEN_D3VGA_CONTROL,
	EVERGREEN_D4VGA_CONTROL,
	EVERGREEN_D5VGA_CONTROL,
	EVERGREEN_D6VGA_CONTROL,
};

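/* Blank or unblank the CRTC.  On DCE8 the per-crtc VGA control register is
 * briefly enabled around the BlankCRTC call (apparently needed for the table
 * to take effect) and then restored to its previous value.
 */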
static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
	BLANK_CRTC_PS_ALLOCATION args;
	u32 vga_control = 0;

	memset(&args, 0, sizeof(args));

	if (ASIC_IS_DCE8(rdev)) {
		vga_control = RREG32(vga_control_regs[radeon_crtc->crtc_id]);
		WREG32(vga_control_regs[radeon_crtc->crtc_id], vga_control | 1);
	}

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucBlanking = state;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);

	if (ASIC_IS_DCE8(rdev))
		WREG32(vga_control_regs[radeon_crtc->crtc_id], vga_control);
}

static void atombios_powergate_crtc(struct drm_crtc *crtc, int state)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index = GetIndexIntoMasterTable(COMMAND, EnableDispPowerGating);
	ENABLE_DISP_POWER_GATING_PARAMETERS_V2_1 args;

	memset(&args, 0, sizeof(args));

	args.ucDispPipeId = radeon_crtc->crtc_id;
	args.ucEnable = state;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

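/* DPMS handling: enable/disable the CRTC, its memory requests (DCE3-DCE5),
 * blanking, vblank interrupts and LUT, then let power management recompute
 * clocks for the new display configuration.
 */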
void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		radeon_crtc->enabled = true;
		atombios_enable_crtc(crtc, ATOM_ENABLE);
		if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
			atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
		atombios_blank_crtc(crtc, ATOM_DISABLE);
		if (dev->num_crtcs > radeon_crtc->crtc_id)
			drm_crtc_vblank_on(crtc);
		radeon_crtc_load_lut(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		if (dev->num_crtcs > radeon_crtc->crtc_id)
			drm_crtc_vblank_off(crtc);
		if (radeon_crtc->enabled)
			atombios_blank_crtc(crtc, ATOM_ENABLE);
		if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
			atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
		atombios_enable_crtc(crtc, ATOM_DISABLE);
		radeon_crtc->enabled = false;
		break;
	}

	radeon_pm_compute_clocks(rdev);
}

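/* Program CRTC timing from a detailed timing descriptor (DTD), with the
 * overscan borders subtracted from the active area.
 */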
static void
atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
			     struct drm_display_mode *mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
	u16 misc = 0;

	memset(&args, 0, sizeof(args));
	args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
	args.usH_Blanking_Time =
		cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
	args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
	args.usV_Blanking_Time =
		cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
	args.usH_SyncOffset =
		cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
	args.usH_SyncWidth =
		cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
	args.usV_SyncOffset =
		cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
	args.usV_SyncWidth =
		cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
	args.ucH_Border = radeon_crtc->h_border;
	args.ucV_Border = radeon_crtc->v_border;

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		misc |= ATOM_VSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		misc |= ATOM_HSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_CSYNC)
		misc |= ATOM_COMPOSITESYNC;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		misc |= ATOM_INTERLACE;
	if (mode->flags & DRM_MODE_FLAG_DBLCLK)
		misc |= ATOM_DOUBLE_CLOCK_MODE;
	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
		misc |= ATOM_H_REPLICATIONBY2 | ATOM_V_REPLICATIONBY2;

	args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
	args.ucCRTC = radeon_crtc->crtc_id;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

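/* Program CRTC timing directly from the adjusted mode: totals, sync position
 * and width, overscan borders, and misc flags (sync polarity, composite sync,
 * interlace, double clock, pixel replication).
 */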
static void atombios_crtc_set_timing(struct drm_crtc *crtc,
				     struct drm_display_mode *mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
	u16 misc = 0;

	memset(&args, 0, sizeof(args));
	args.usH_Total = cpu_to_le16(mode->crtc_htotal);
	args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
	args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
	args.usH_SyncWidth =
		cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
	args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
	args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
	args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
	args.usV_SyncWidth =
		cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);

	args.ucOverscanRight = radeon_crtc->h_border;
	args.ucOverscanLeft = radeon_crtc->h_border;
	args.ucOverscanBottom = radeon_crtc->v_border;
	args.ucOverscanTop = radeon_crtc->v_border;

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		misc |= ATOM_VSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		misc |= ATOM_HSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_CSYNC)
		misc |= ATOM_COMPOSITESYNC;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		misc |= ATOM_INTERLACE;
	if (mode->flags & DRM_MODE_FLAG_DBLCLK)
		misc |= ATOM_DOUBLE_CLOCK_MODE;
	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
		misc |= ATOM_H_REPLICATIONBY2 | ATOM_V_REPLICATIONBY2;

	args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
	args.ucCRTC = radeon_crtc->crtc_id;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

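/* Turn off spread spectrum for the given PLL by clearing the SS enable bit in
 * the corresponding PxPLL register (DCE4 and AVIVO register layouts differ).
 */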
static void atombios_disable_ss(struct radeon_device *rdev, int pll_id)
{
	u32 ss_cntl;

	if (ASIC_IS_DCE4(rdev)) {
		switch (pll_id) {
		case ATOM_PPLL1:
			ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
			ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
			WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
			break;
		case ATOM_PPLL2:
			ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
			ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
			WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
			break;
		case ATOM_DCPLL:
		case ATOM_PPLL_INVALID:
			return;
		}
	} else if (ASIC_IS_AVIVO(rdev)) {
		switch (pll_id) {
		case ATOM_PPLL1:
			ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
			ss_cntl &= ~1;
			WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
			break;
		case ATOM_PPLL2:
			ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
			ss_cntl &= ~1;
			WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
			break;
		case ATOM_DCPLL:
		case ATOM_PPLL_INVALID:
			return;
		}
	}
}

union atom_enable_ss {
	ENABLE_LVDS_SS_PARAMETERS lvds_ss;
	ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2;
	ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
	ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2;
	ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3;
};

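/* Enable or disable spread spectrum on a PLL through the
 * EnableSpreadSpectrumOnPPLL table, using the parameter layout that matches
 * the ASIC's DCE generation.
 */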
static void atombios_crtc_program_ss(struct radeon_device *rdev,
				     int enable,
				     int pll_id,
				     int crtc_id,
				     struct radeon_atom_ss *ss)
{
	unsigned i;
	int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
	union atom_enable_ss args;

	if (enable) {
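		/* Don't touch spread spectrum when the percentage is zero or
		 * the spread is generated externally; reprogramming it here
		 * could disturb a PLL that is already up and running.
		 */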
		if (ss->percentage == 0)
			return;
		if (ss->type & ATOM_EXTERNAL_SS_MASK)
			return;
	} else {
		for (i = 0; i < rdev->num_crtc; i++) {
			if (rdev->mode_info.crtcs[i] &&
			    rdev->mode_info.crtcs[i]->enabled &&
			    i != crtc_id &&
			    pll_id == rdev->mode_info.crtcs[i]->pll_id) {
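				/* Another enabled crtc still uses this PLL;
				 * leave spread spectrum alone so we don't
				 * disturb its output.
				 */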
				return;
			}
		}
	}

	memset(&args, 0, sizeof(args));

	if (ASIC_IS_DCE5(rdev)) {
		args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0);
		args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
		switch (pll_id) {
		case ATOM_PPLL1:
			args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL;
			break;
		case ATOM_PPLL2:
			args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL;
			break;
		case ATOM_DCPLL:
			args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL;
			break;
		case ATOM_PPLL_INVALID:
			return;
		}
		args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
		args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
		args.v3.ucEnable = enable;
	} else if (ASIC_IS_DCE4(rdev)) {
		args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
		args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
		switch (pll_id) {
		case ATOM_PPLL1:
			args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL;
			break;
		case ATOM_PPLL2:
			args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL;
			break;
		case ATOM_DCPLL:
			args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL;
			break;
		case ATOM_PPLL_INVALID:
			return;
		}
		args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
		args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
		args.v2.ucEnable = enable;
	} else if (ASIC_IS_DCE3(rdev)) {
		args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
		args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
		args.v1.ucSpreadSpectrumStep = ss->step;
		args.v1.ucSpreadSpectrumDelay = ss->delay;
		args.v1.ucSpreadSpectrumRange = ss->range;
		args.v1.ucPpll = pll_id;
		args.v1.ucEnable = enable;
	} else if (ASIC_IS_AVIVO(rdev)) {
		if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
		    (ss->type & ATOM_EXTERNAL_SS_MASK)) {
			atombios_disable_ss(rdev, pll_id);
			return;
		}
		args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
		args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
		args.lvds_ss_2.ucSpreadSpectrumStep = ss->step;
		args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay;
		args.lvds_ss_2.ucSpreadSpectrumRange = ss->range;
		args.lvds_ss_2.ucEnable = enable;
	} else {
		if (enable == ATOM_DISABLE) {
			atombios_disable_ss(rdev, pll_id);
			return;
		}
		args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
		args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
		args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2;
		args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4;
		args.lvds_ss.ucEnable = enable;
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

union adjust_pixel_clock {
	ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
	ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
};

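/* Ask the AdjustDisplayPll table (DCE3+) for the pixel clock the PLL should
 * actually be programmed to, and pick up any reference/post divider hints it
 * returns; pre-DCE3 parts only get PLL flag adjustments here.
 */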
557static u32 atombios_adjust_pll(struct drm_crtc *crtc,
558 struct drm_display_mode *mode)
559{
560 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
561 struct drm_device *dev = crtc->dev;
562 struct radeon_device *rdev = dev->dev_private;
563 struct drm_encoder *encoder = radeon_crtc->encoder;
564 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
565 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
566 u32 adjusted_clock = mode->clock;
567 int encoder_mode = atombios_get_encoder_mode(encoder);
568 u32 dp_clock = mode->clock;
569 u32 clock = mode->clock;
570 int bpc = radeon_crtc->bpc;
571 bool is_duallink = radeon_dig_monitor_is_duallink(encoder, mode->clock);
572
573
574 radeon_crtc->pll_flags = 0;
575
576 if (ASIC_IS_AVIVO(rdev)) {
577 if ((rdev->family == CHIP_RS600) ||
578 (rdev->family == CHIP_RS690) ||
579 (rdev->family == CHIP_RS740))
580 radeon_crtc->pll_flags |= (
581 RADEON_PLL_PREFER_CLOSEST_LOWER);
582
583 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000)
584 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
585 else
586 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
587
588 if (rdev->family < CHIP_RV770)
589 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP;
590
591 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev))
592 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
593
594 if (((rdev->family == CHIP_RS780) || (rdev->family == CHIP_RS880))
595 && !radeon_crtc->ss_enabled)
596 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
597 if (ASIC_IS_DCE32(rdev) && mode->clock > 165000)
598 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
599 } else {
600 radeon_crtc->pll_flags |= RADEON_PLL_LEGACY;
601
602 if (mode->clock > 200000)
603 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
604 else
605 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
606 }
607
608 if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
609 (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
610 if (connector) {
611 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
612 struct radeon_connector_atom_dig *dig_connector =
613 radeon_connector->con_priv;
614
615 dp_clock = dig_connector->dp_clock;
616 }
617 }
618
619 if (radeon_encoder->is_mst_encoder) {
620 struct radeon_encoder_mst *mst_enc = radeon_encoder->enc_priv;
621 struct radeon_connector_atom_dig *dig_connector = mst_enc->connector->con_priv;
622
623 dp_clock = dig_connector->dp_clock;
624 }
625
626
627 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
628 if (radeon_crtc->ss_enabled) {
629 if (radeon_crtc->ss.refdiv) {
630 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
631 radeon_crtc->pll_reference_div = radeon_crtc->ss.refdiv;
632 if (ASIC_IS_AVIVO(rdev) &&
633 rdev->family != CHIP_RS780 &&
634 rdev->family != CHIP_RS880)
635 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
636 }
637 }
638 }
639
640 if (ASIC_IS_AVIVO(rdev)) {
641
642 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
643 adjusted_clock = mode->clock * 2;
644 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
645 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
646 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
647 radeon_crtc->pll_flags |= RADEON_PLL_IS_LCD;
648 } else {
649 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
650 radeon_crtc->pll_flags |= RADEON_PLL_NO_ODD_POST_DIV;
651 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
652 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
653 }
654
655
656 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
657 switch (bpc) {
658 case 8:
659 default:
660 break;
661 case 10:
662 clock = (clock * 5) / 4;
663 break;
664 case 12:
665 clock = (clock * 3) / 2;
666 break;
667 case 16:
668 clock = clock * 2;
669 break;
670 }
671 }
672
673
674
675
676
677 if (ASIC_IS_DCE3(rdev)) {
678 union adjust_pixel_clock args;
679 u8 frev, crev;
680 int index;
681
682 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
683 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
684 &crev))
685 return adjusted_clock;
686
687 memset(&args, 0, sizeof(args));
688
689 switch (frev) {
690 case 1:
691 switch (crev) {
692 case 1:
693 case 2:
694 args.v1.usPixelClock = cpu_to_le16(clock / 10);
695 args.v1.ucTransmitterID = radeon_encoder->encoder_id;
696 args.v1.ucEncodeMode = encoder_mode;
697 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
698 args.v1.ucConfig |=
699 ADJUST_DISPLAY_CONFIG_SS_ENABLE;
700
701 atom_execute_table(rdev->mode_info.atom_context,
702 index, (uint32_t *)&args);
703 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
704 break;
705 case 3:
706 args.v3.sInput.usPixelClock = cpu_to_le16(clock / 10);
707 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
708 args.v3.sInput.ucEncodeMode = encoder_mode;
709 args.v3.sInput.ucDispPllConfig = 0;
710 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
711 args.v3.sInput.ucDispPllConfig |=
712 DISPPLL_CONFIG_SS_ENABLE;
713 if (ENCODER_MODE_IS_DP(encoder_mode)) {
714 args.v3.sInput.ucDispPllConfig |=
715 DISPPLL_CONFIG_COHERENT_MODE;
716
717 args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
718 } else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
719 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
720 if (dig->coherent_mode)
721 args.v3.sInput.ucDispPllConfig |=
722 DISPPLL_CONFIG_COHERENT_MODE;
723 if (is_duallink)
724 args.v3.sInput.ucDispPllConfig |=
725 DISPPLL_CONFIG_DUAL_LINK;
726 }
727 if (radeon_encoder_get_dp_bridge_encoder_id(encoder) !=
728 ENCODER_OBJECT_ID_NONE)
729 args.v3.sInput.ucExtTransmitterID =
730 radeon_encoder_get_dp_bridge_encoder_id(encoder);
731 else
732 args.v3.sInput.ucExtTransmitterID = 0;
733
734 atom_execute_table(rdev->mode_info.atom_context,
735 index, (uint32_t *)&args);
736 adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
737 if (args.v3.sOutput.ucRefDiv) {
738 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
739 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
740 radeon_crtc->pll_reference_div = args.v3.sOutput.ucRefDiv;
741 }
742 if (args.v3.sOutput.ucPostDiv) {
743 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
744 radeon_crtc->pll_flags |= RADEON_PLL_USE_POST_DIV;
745 radeon_crtc->pll_post_div = args.v3.sOutput.ucPostDiv;
746 }
747 break;
748 default:
749 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
750 return adjusted_clock;
751 }
752 break;
753 default:
754 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
755 return adjusted_clock;
756 }
757 }
758 return adjusted_clock;
759}
760
761union set_pixel_clock {
762 SET_PIXEL_CLOCK_PS_ALLOCATION base;
763 PIXEL_CLOCK_PARAMETERS v1;
764 PIXEL_CLOCK_PARAMETERS_V2 v2;
765 PIXEL_CLOCK_PARAMETERS_V3 v3;
766 PIXEL_CLOCK_PARAMETERS_V5 v5;
767 PIXEL_CLOCK_PARAMETERS_V6 v6;
768};
769
770
771
772
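/* Program the display engine clock (DISPCLK) via the SetPixelClock table,
 * using the DCPLL or, on newer parts, PPLL0 or the external PLL.
 */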
773static void atombios_crtc_set_disp_eng_pll(struct radeon_device *rdev,
774 u32 dispclk)
775{
776 u8 frev, crev;
777 int index;
778 union set_pixel_clock args;
779
780 memset(&args, 0, sizeof(args));
781
782 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
783 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
784 &crev))
785 return;
786
787 switch (frev) {
788 case 1:
789 switch (crev) {
790 case 5:
791
792
793
794 args.v5.ucCRTC = ATOM_CRTC_INVALID;
795 args.v5.usPixelClock = cpu_to_le16(dispclk);
796 args.v5.ucPpll = ATOM_DCPLL;
797 break;
798 case 6:
799
800
801
802 args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk);
803 if (ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev))
804 args.v6.ucPpll = ATOM_EXT_PLL1;
805 else if (ASIC_IS_DCE6(rdev))
806 args.v6.ucPpll = ATOM_PPLL0;
807 else
808 args.v6.ucPpll = ATOM_DCPLL;
809 break;
810 default:
811 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
812 return;
813 }
814 break;
815 default:
816 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
817 return;
818 }
819 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
820}
821
822static void atombios_crtc_program_pll(struct drm_crtc *crtc,
823 u32 crtc_id,
824 int pll_id,
825 u32 encoder_mode,
826 u32 encoder_id,
827 u32 clock,
828 u32 ref_div,
829 u32 fb_div,
830 u32 frac_fb_div,
831 u32 post_div,
832 int bpc,
833 bool ss_enabled,
834 struct radeon_atom_ss *ss)
835{
836 struct drm_device *dev = crtc->dev;
837 struct radeon_device *rdev = dev->dev_private;
838 u8 frev, crev;
839 int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
840 union set_pixel_clock args;
841
842 memset(&args, 0, sizeof(args));
843
844 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
845 &crev))
846 return;
847
848 switch (frev) {
849 case 1:
850 switch (crev) {
851 case 1:
852 if (clock == ATOM_DISABLE)
853 return;
854 args.v1.usPixelClock = cpu_to_le16(clock / 10);
855 args.v1.usRefDiv = cpu_to_le16(ref_div);
856 args.v1.usFbDiv = cpu_to_le16(fb_div);
857 args.v1.ucFracFbDiv = frac_fb_div;
858 args.v1.ucPostDiv = post_div;
859 args.v1.ucPpll = pll_id;
860 args.v1.ucCRTC = crtc_id;
861 args.v1.ucRefDivSrc = 1;
862 break;
863 case 2:
864 args.v2.usPixelClock = cpu_to_le16(clock / 10);
865 args.v2.usRefDiv = cpu_to_le16(ref_div);
866 args.v2.usFbDiv = cpu_to_le16(fb_div);
867 args.v2.ucFracFbDiv = frac_fb_div;
868 args.v2.ucPostDiv = post_div;
869 args.v2.ucPpll = pll_id;
870 args.v2.ucCRTC = crtc_id;
871 args.v2.ucRefDivSrc = 1;
872 break;
873 case 3:
874 args.v3.usPixelClock = cpu_to_le16(clock / 10);
875 args.v3.usRefDiv = cpu_to_le16(ref_div);
876 args.v3.usFbDiv = cpu_to_le16(fb_div);
877 args.v3.ucFracFbDiv = frac_fb_div;
878 args.v3.ucPostDiv = post_div;
879 args.v3.ucPpll = pll_id;
880 if (crtc_id == ATOM_CRTC2)
881 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC2;
882 else
883 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC1;
884 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
885 args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC;
886 args.v3.ucTransmitterId = encoder_id;
887 args.v3.ucEncoderMode = encoder_mode;
888 break;
889 case 5:
890 args.v5.ucCRTC = crtc_id;
891 args.v5.usPixelClock = cpu_to_le16(clock / 10);
892 args.v5.ucRefDiv = ref_div;
893 args.v5.usFbDiv = cpu_to_le16(fb_div);
894 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
895 args.v5.ucPostDiv = post_div;
896 args.v5.ucMiscInfo = 0;
897 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
898 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC;
899 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
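				/* HDMI deep color: 10 bpc uses the _32BPP
				 * define and 12 bpc the _30BPP one; the atom
				 * define names don't match the actual depths.
				 */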
900 switch (bpc) {
901 case 8:
902 default:
903 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP;
904 break;
905 case 10:
906
907 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_32BPP;
908 break;
909 case 12:
910
911 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP;
912 break;
913 }
914 }
915 args.v5.ucTransmitterID = encoder_id;
916 args.v5.ucEncoderMode = encoder_mode;
917 args.v5.ucPpll = pll_id;
918 break;
919 case 6:
920 args.v6.ulDispEngClkFreq = cpu_to_le32(crtc_id << 24 | clock / 10);
921 args.v6.ucRefDiv = ref_div;
922 args.v6.usFbDiv = cpu_to_le16(fb_div);
923 args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
924 args.v6.ucPostDiv = post_div;
925 args.v6.ucMiscInfo = 0;
926 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
927 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC;
928 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
929 switch (bpc) {
930 case 8:
931 default:
932 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP;
933 break;
934 case 10:
935 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP_V6;
936 break;
937 case 12:
938 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP_V6;
939 break;
940 case 16:
941 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP;
942 break;
943 }
944 }
945 args.v6.ucTransmitterID = encoder_id;
946 args.v6.ucEncoderMode = encoder_mode;
947 args.v6.ucPpll = pll_id;
948 break;
949 default:
950 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
951 return;
952 }
953 break;
954 default:
955 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
956 return;
957 }
958
959 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
960}
961
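/* Gather per-mode PLL inputs for this crtc: panel bpc, whether spread
 * spectrum should be used for the encoder mode, and the adjusted pixel clock
 * from the AdjustDisplayPll table.
 */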
962static bool atombios_crtc_prepare_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
963{
964 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
965 struct drm_device *dev = crtc->dev;
966 struct radeon_device *rdev = dev->dev_private;
967 struct radeon_encoder *radeon_encoder =
968 to_radeon_encoder(radeon_crtc->encoder);
969 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
970
971 radeon_crtc->bpc = 8;
972 radeon_crtc->ss_enabled = false;
973
974 if (radeon_encoder->is_mst_encoder) {
975 radeon_dp_mst_prepare_pll(crtc, mode);
976 } else if ((radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
977 (radeon_encoder_get_dp_bridge_encoder_id(radeon_crtc->encoder) != ENCODER_OBJECT_ID_NONE)) {
978 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
979 struct drm_connector *connector =
980 radeon_get_connector_for_encoder(radeon_crtc->encoder);
981 struct radeon_connector *radeon_connector =
982 to_radeon_connector(connector);
983 struct radeon_connector_atom_dig *dig_connector =
984 radeon_connector->con_priv;
985 int dp_clock;
986
987
988 radeon_connector->pixelclock_for_modeset = mode->clock;
989 radeon_crtc->bpc = radeon_get_monitor_bpc(connector);
990
991 switch (encoder_mode) {
992 case ATOM_ENCODER_MODE_DP_MST:
993 case ATOM_ENCODER_MODE_DP:
994
995 dp_clock = dig_connector->dp_clock / 10;
996 if (ASIC_IS_DCE4(rdev))
997 radeon_crtc->ss_enabled =
998 radeon_atombios_get_asic_ss_info(rdev, &radeon_crtc->ss,
999 ASIC_INTERNAL_SS_ON_DP,
1000 dp_clock);
1001 else {
1002 if (dp_clock == 16200) {
1003 radeon_crtc->ss_enabled =
1004 radeon_atombios_get_ppll_ss_info(rdev,
1005 &radeon_crtc->ss,
1006 ATOM_DP_SS_ID2);
1007 if (!radeon_crtc->ss_enabled)
1008 radeon_crtc->ss_enabled =
1009 radeon_atombios_get_ppll_ss_info(rdev,
1010 &radeon_crtc->ss,
1011 ATOM_DP_SS_ID1);
1012 } else {
1013 radeon_crtc->ss_enabled =
1014 radeon_atombios_get_ppll_ss_info(rdev,
1015 &radeon_crtc->ss,
1016 ATOM_DP_SS_ID1);
1017 }
1018
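				/* disable spread spectrum on DCE3 DP */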
1019 radeon_crtc->ss_enabled = false;
1020 }
1021 break;
1022 case ATOM_ENCODER_MODE_LVDS:
1023 if (ASIC_IS_DCE4(rdev))
1024 radeon_crtc->ss_enabled =
1025 radeon_atombios_get_asic_ss_info(rdev,
1026 &radeon_crtc->ss,
1027 dig->lcd_ss_id,
1028 mode->clock / 10);
1029 else
1030 radeon_crtc->ss_enabled =
1031 radeon_atombios_get_ppll_ss_info(rdev,
1032 &radeon_crtc->ss,
1033 dig->lcd_ss_id);
1034 break;
1035 case ATOM_ENCODER_MODE_DVI:
1036 if (ASIC_IS_DCE4(rdev))
1037 radeon_crtc->ss_enabled =
1038 radeon_atombios_get_asic_ss_info(rdev,
1039 &radeon_crtc->ss,
1040 ASIC_INTERNAL_SS_ON_TMDS,
1041 mode->clock / 10);
1042 break;
1043 case ATOM_ENCODER_MODE_HDMI:
1044 if (ASIC_IS_DCE4(rdev))
1045 radeon_crtc->ss_enabled =
1046 radeon_atombios_get_asic_ss_info(rdev,
1047 &radeon_crtc->ss,
1048 ASIC_INTERNAL_SS_ON_HDMI,
1049 mode->clock / 10);
1050 break;
1051 default:
1052 break;
1053 }
1054 }
1055
1056
1057 radeon_crtc->adjusted_clock = atombios_adjust_pll(crtc, mode);
1058
1059 return true;
1060}
1061
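/* Compute the PLL dividers for the requested pixel clock and program them,
 * disabling spread spectrum around the update and re-enabling it afterwards
 * (with the recalculated amount/step on DCE4+).
 */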
1062static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
1063{
1064 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1065 struct drm_device *dev = crtc->dev;
1066 struct radeon_device *rdev = dev->dev_private;
1067 struct radeon_encoder *radeon_encoder =
1068 to_radeon_encoder(radeon_crtc->encoder);
1069 u32 pll_clock = mode->clock;
1070 u32 clock = mode->clock;
1071 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
1072 struct radeon_pll *pll;
1073 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
1074
1075
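	/* For HDMI deep color on DCE5+, program the PLL with the already
	 * scaled-up pixel clock computed in atombios_adjust_pll.
	 */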
1076 if (ASIC_IS_DCE5(rdev) &&
1077 (encoder_mode == ATOM_ENCODER_MODE_HDMI) &&
1078 (radeon_crtc->bpc > 8))
1079 clock = radeon_crtc->adjusted_clock;
1080
1081 switch (radeon_crtc->pll_id) {
1082 case ATOM_PPLL1:
1083 pll = &rdev->clock.p1pll;
1084 break;
1085 case ATOM_PPLL2:
1086 pll = &rdev->clock.p2pll;
1087 break;
1088 case ATOM_DCPLL:
1089 case ATOM_PPLL_INVALID:
1090 default:
1091 pll = &rdev->clock.dcpll;
1092 break;
1093 }
1094
1095
1096 pll->flags = radeon_crtc->pll_flags;
1097 pll->reference_div = radeon_crtc->pll_reference_div;
1098 pll->post_div = radeon_crtc->pll_post_div;
1099
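	/* TV outputs keep the legacy PLL computation even on AVIVO parts;
	 * the avivo algorithm reportedly misbehaves for TV on some boards.
	 */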
1100 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1101
1102 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1103 &fb_div, &frac_fb_div, &ref_div, &post_div);
1104 else if (ASIC_IS_AVIVO(rdev))
1105 radeon_compute_pll_avivo(pll, radeon_crtc->adjusted_clock, &pll_clock,
1106 &fb_div, &frac_fb_div, &ref_div, &post_div);
1107 else
1108 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1109 &fb_div, &frac_fb_div, &ref_div, &post_div);
1110
1111 atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id,
1112 radeon_crtc->crtc_id, &radeon_crtc->ss);
1113
1114 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1115 encoder_mode, radeon_encoder->encoder_id, clock,
1116 ref_div, fb_div, frac_fb_div, post_div,
1117 radeon_crtc->bpc, radeon_crtc->ss_enabled, &radeon_crtc->ss);
1118
1119 if (radeon_crtc->ss_enabled) {
1120
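		/* calculate the spread spectrum amount and step size from the
		 * feedback/reference dividers actually chosen for the PLL
		 */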
1121 if (ASIC_IS_DCE4(rdev)) {
1122 u32 step_size;
1123 u32 amount = (((fb_div * 10) + frac_fb_div) *
1124 (u32)radeon_crtc->ss.percentage) /
1125 (100 * (u32)radeon_crtc->ss.percentage_divider);
1126 radeon_crtc->ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK;
1127 radeon_crtc->ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) &
1128 ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK;
1129 if (radeon_crtc->ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD)
1130 step_size = (4 * amount * ref_div * ((u32)radeon_crtc->ss.rate * 2048)) /
1131 (125 * 25 * pll->reference_freq / 100);
1132 else
1133 step_size = (2 * amount * ref_div * ((u32)radeon_crtc->ss.rate * 2048)) /
1134 (125 * 25 * pll->reference_freq / 100);
1135 radeon_crtc->ss.step = step_size;
1136 }
1137
1138 atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id,
1139 radeon_crtc->crtc_id, &radeon_crtc->ss);
1140 }
1141}
1142
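/* Program the primary surface (scanout buffer) for a DCE4+ crtc: pin the BO,
 * translate the drm format and tiling flags into GRPH_CONTROL settings, and
 * write the surface address, pitch and viewport registers.
 */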
1143static int dce4_crtc_do_set_base(struct drm_crtc *crtc,
1144 struct drm_framebuffer *fb,
1145 int x, int y, int atomic)
1146{
1147 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1148 struct drm_device *dev = crtc->dev;
1149 struct radeon_device *rdev = dev->dev_private;
1150 struct drm_framebuffer *target_fb;
1151 struct drm_gem_object *obj;
1152 struct radeon_bo *rbo;
1153 uint64_t fb_location;
1154 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1155 unsigned bankw, bankh, mtaspect, tile_split;
1156 u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
1157 u32 tmp, viewport_w, viewport_h;
1158 int r;
1159 bool bypass_lut = false;
1160 struct drm_format_name_buf format_name;
1161
1162
1163 if (!atomic && !crtc->primary->fb) {
1164 DRM_DEBUG_KMS("No FB bound\n");
1165 return 0;
1166 }
1167
1168 if (atomic)
1169 target_fb = fb;
1170 else
1171 target_fb = crtc->primary->fb;
1172
1173
1174
1175
1176 obj = target_fb->obj[0];
1177 rbo = gem_to_radeon_bo(obj);
1178 r = radeon_bo_reserve(rbo, false);
1179 if (unlikely(r != 0))
1180 return r;
1181
1182 if (atomic)
1183 fb_location = radeon_bo_gpu_offset(rbo);
1184 else {
1185 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1186 if (unlikely(r != 0)) {
1187 radeon_bo_unreserve(rbo);
1188 return -EINVAL;
1189 }
1190 }
1191
1192 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1193 radeon_bo_unreserve(rbo);
1194
1195 switch (target_fb->format->format) {
1196 case DRM_FORMAT_C8:
1197 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
1198 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
1199 break;
1200 case DRM_FORMAT_XRGB4444:
1201 case DRM_FORMAT_ARGB4444:
1202 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1203 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB4444));
1204#ifdef __BIG_ENDIAN
1205 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1206#endif
1207 break;
1208 case DRM_FORMAT_XRGB1555:
1209 case DRM_FORMAT_ARGB1555:
1210 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1211 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
1212#ifdef __BIG_ENDIAN
1213 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1214#endif
1215 break;
1216 case DRM_FORMAT_BGRX5551:
1217 case DRM_FORMAT_BGRA5551:
1218 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1219 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA5551));
1220#ifdef __BIG_ENDIAN
1221 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1222#endif
1223 break;
1224 case DRM_FORMAT_RGB565:
1225 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1226 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
1227#ifdef __BIG_ENDIAN
1228 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1229#endif
1230 break;
1231 case DRM_FORMAT_XRGB8888:
1232 case DRM_FORMAT_ARGB8888:
1233 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1234 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1235#ifdef __BIG_ENDIAN
1236 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1237#endif
1238 break;
1239 case DRM_FORMAT_XRGB2101010:
1240 case DRM_FORMAT_ARGB2101010:
1241 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1242 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB2101010));
1243#ifdef __BIG_ENDIAN
1244 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1245#endif
1246
1247 bypass_lut = true;
1248 break;
1249 case DRM_FORMAT_BGRX1010102:
1250 case DRM_FORMAT_BGRA1010102:
1251 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1252 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA1010102));
1253#ifdef __BIG_ENDIAN
1254 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1255#endif
1256
1257 bypass_lut = true;
1258 break;
1259 case DRM_FORMAT_XBGR8888:
1260 case DRM_FORMAT_ABGR8888:
1261 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1262 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1263 fb_swap = (EVERGREEN_GRPH_RED_CROSSBAR(EVERGREEN_GRPH_RED_SEL_B) |
1264 EVERGREEN_GRPH_BLUE_CROSSBAR(EVERGREEN_GRPH_BLUE_SEL_R));
1265#ifdef __BIG_ENDIAN
1266 fb_swap |= EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1267#endif
1268 break;
1269 default:
1270 DRM_ERROR("Unsupported screen format %s\n",
1271 drm_get_format_name(target_fb->format->format, &format_name));
1272 return -EINVAL;
1273 }
1274
1275 if (tiling_flags & RADEON_TILING_MACRO) {
1276 evergreen_tiling_fields(tiling_flags, &bankw, &bankh, &mtaspect, &tile_split);
1277
1278
1279 if (rdev->family >= CHIP_TAHITI) {
1280 unsigned index, num_banks;
1281
1282 if (rdev->family >= CHIP_BONAIRE) {
1283 unsigned tileb, tile_split_bytes;
1284
1285
1286 tile_split_bytes = 64 << tile_split;
1287 tileb = 8 * 8 * target_fb->format->cpp[0];
1288 tileb = min(tile_split_bytes, tileb);
1289
1290 for (index = 0; tileb > 64; index++)
1291 tileb >>= 1;
1292
1293 if (index >= 16) {
1294 DRM_ERROR("Wrong screen bpp (%u) or tile split (%u)\n",
1295 target_fb->format->cpp[0] * 8,
1296 tile_split);
1297 return -EINVAL;
1298 }
1299
1300 num_banks = (rdev->config.cik.macrotile_mode_array[index] >> 6) & 0x3;
1301 } else {
1302 switch (target_fb->format->cpp[0] * 8) {
1303 case 8:
1304 index = 10;
1305 break;
1306 case 16:
1307 index = SI_TILE_MODE_COLOR_2D_SCANOUT_16BPP;
1308 break;
1309 default:
1310 case 32:
1311 index = SI_TILE_MODE_COLOR_2D_SCANOUT_32BPP;
1312 break;
1313 }
1314
1315 num_banks = (rdev->config.si.tile_mode_array[index] >> 20) & 0x3;
1316 }
1317
1318 fb_format |= EVERGREEN_GRPH_NUM_BANKS(num_banks);
1319 } else {
1320
1321 if (rdev->family >= CHIP_CAYMAN)
1322 tmp = rdev->config.cayman.tile_config;
1323 else
1324 tmp = rdev->config.evergreen.tile_config;
1325
1326 switch ((tmp & 0xf0) >> 4) {
1327 case 0:
1328 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_4_BANK);
1329 break;
1330 case 1:
1331 default:
1332 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_8_BANK);
1333 break;
1334 case 2:
1335 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_16_BANK);
1336 break;
1337 }
1338 }
1339
1340 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
1341 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(tile_split);
1342 fb_format |= EVERGREEN_GRPH_BANK_WIDTH(bankw);
1343 fb_format |= EVERGREEN_GRPH_BANK_HEIGHT(bankh);
1344 fb_format |= EVERGREEN_GRPH_MACRO_TILE_ASPECT(mtaspect);
1345 if (rdev->family >= CHIP_BONAIRE) {
1346
1347 fb_format |= CIK_GRPH_MICRO_TILE_MODE(CIK_DISPLAY_MICRO_TILING);
1348 }
1349 } else if (tiling_flags & RADEON_TILING_MICRO)
1350 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);
1351
1352 if (rdev->family >= CHIP_BONAIRE) {
1353
1354
1355
1356 u32 pipe_config = (rdev->config.cik.tile_mode_array[10] >> 6) & 0x1f;
1357
1358 fb_format |= CIK_GRPH_PIPE_CONFIG(pipe_config);
1359 } else if ((rdev->family == CHIP_TAHITI) ||
1360 (rdev->family == CHIP_PITCAIRN))
1361 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P8_32x32_8x16);
1362 else if ((rdev->family == CHIP_VERDE) ||
1363 (rdev->family == CHIP_OLAND) ||
1364 (rdev->family == CHIP_HAINAN))
1365 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P4_8x16);
1366
1367 switch (radeon_crtc->crtc_id) {
1368 case 0:
1369 WREG32(AVIVO_D1VGA_CONTROL, 0);
1370 break;
1371 case 1:
1372 WREG32(AVIVO_D2VGA_CONTROL, 0);
1373 break;
1374 case 2:
1375 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
1376 break;
1377 case 3:
1378 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
1379 break;
1380 case 4:
1381 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
1382 break;
1383 case 5:
1384 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
1385 break;
1386 default:
1387 break;
1388 }
1389
1390
1391
1392
1393 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, 0);
1394
1395 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1396 upper_32_bits(fb_location));
1397 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1398 upper_32_bits(fb_location));
1399 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1400 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1401 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1402 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1403 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1404 WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1405
1406
1407
1408
1409
1410
1411 WREG32_P(EVERGREEN_GRPH_LUT_10BIT_BYPASS_CONTROL + radeon_crtc->crtc_offset,
1412 (bypass_lut ? EVERGREEN_LUT_10BIT_BYPASS_EN : 0),
1413 ~EVERGREEN_LUT_10BIT_BYPASS_EN);
1414
1415 if (bypass_lut)
1416 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1417
1418 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1419 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1420 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
1421 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1422 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1423 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1424
1425 fb_pitch_pixels = target_fb->pitches[0] / target_fb->format->cpp[0];
1426 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1427 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1428
1429 if (rdev->family >= CHIP_BONAIRE)
1430 WREG32(CIK_LB_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1431 target_fb->height);
1432 else
1433 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1434 target_fb->height);
1435 x &= ~3;
1436 y &= ~1;
1437 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
1438 (x << 16) | y);
1439 viewport_w = crtc->mode.hdisplay;
1440 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1441 if ((rdev->family >= CHIP_BONAIRE) &&
1442 (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE))
1443 viewport_h *= 2;
1444 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1445 (viewport_w << 16) | viewport_h);
1446
1447
1448 WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1449
1450 if (!atomic && fb && fb != crtc->primary->fb) {
1451 rbo = gem_to_radeon_bo(fb->obj[0]);
1452 r = radeon_bo_reserve(rbo, false);
1453 if (unlikely(r != 0))
1454 return r;
1455 radeon_bo_unpin(rbo);
1456 radeon_bo_unreserve(rbo);
1457 }
1458
1459
1460 radeon_bandwidth_update(rdev);
1461
1462 return 0;
1463}
1464
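/* Same job as dce4_crtc_do_set_base, but for AVIVO (R5xx-R7xx) display blocks. */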
1465static int avivo_crtc_do_set_base(struct drm_crtc *crtc,
1466 struct drm_framebuffer *fb,
1467 int x, int y, int atomic)
1468{
1469 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1470 struct drm_device *dev = crtc->dev;
1471 struct radeon_device *rdev = dev->dev_private;
1472 struct drm_gem_object *obj;
1473 struct radeon_bo *rbo;
1474 struct drm_framebuffer *target_fb;
1475 uint64_t fb_location;
1476 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1477 u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE;
1478 u32 viewport_w, viewport_h;
1479 int r;
1480 bool bypass_lut = false;
1481 struct drm_format_name_buf format_name;
1482
1483
1484 if (!atomic && !crtc->primary->fb) {
1485 DRM_DEBUG_KMS("No FB bound\n");
1486 return 0;
1487 }
1488
1489 if (atomic)
1490 target_fb = fb;
1491 else
1492 target_fb = crtc->primary->fb;
1493
1494 obj = target_fb->obj[0];
1495 rbo = gem_to_radeon_bo(obj);
1496 r = radeon_bo_reserve(rbo, false);
1497 if (unlikely(r != 0))
1498 return r;
1499
1500
1501
1502
1503 if (atomic)
1504 fb_location = radeon_bo_gpu_offset(rbo);
1505 else {
1506 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1507 if (unlikely(r != 0)) {
1508 radeon_bo_unreserve(rbo);
1509 return -EINVAL;
1510 }
1511 }
1512 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1513 radeon_bo_unreserve(rbo);
1514
1515 switch (target_fb->format->format) {
1516 case DRM_FORMAT_C8:
1517 fb_format =
1518 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
1519 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
1520 break;
1521 case DRM_FORMAT_XRGB4444:
1522 case DRM_FORMAT_ARGB4444:
1523 fb_format =
1524 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1525 AVIVO_D1GRPH_CONTROL_16BPP_ARGB4444;
1526#ifdef __BIG_ENDIAN
1527 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1528#endif
1529 break;
1530 case DRM_FORMAT_XRGB1555:
1531 fb_format =
1532 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1533 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
1534#ifdef __BIG_ENDIAN
1535 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1536#endif
1537 break;
1538 case DRM_FORMAT_RGB565:
1539 fb_format =
1540 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1541 AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
1542#ifdef __BIG_ENDIAN
1543 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1544#endif
1545 break;
1546 case DRM_FORMAT_XRGB8888:
1547 case DRM_FORMAT_ARGB8888:
1548 fb_format =
1549 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1550 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1551#ifdef __BIG_ENDIAN
1552 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1553#endif
1554 break;
1555 case DRM_FORMAT_XRGB2101010:
1556 case DRM_FORMAT_ARGB2101010:
1557 fb_format =
1558 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1559 AVIVO_D1GRPH_CONTROL_32BPP_ARGB2101010;
1560#ifdef __BIG_ENDIAN
1561 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1562#endif
1563
1564 bypass_lut = true;
1565 break;
1566 case DRM_FORMAT_XBGR8888:
1567 case DRM_FORMAT_ABGR8888:
1568 fb_format =
1569 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1570 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1571 if (rdev->family >= CHIP_R600)
1572 fb_swap =
1573 (R600_D1GRPH_RED_CROSSBAR(R600_D1GRPH_RED_SEL_B) |
1574 R600_D1GRPH_BLUE_CROSSBAR(R600_D1GRPH_BLUE_SEL_R));
1575 else
1576 fb_format |= AVIVO_D1GRPH_SWAP_RB;
1577#ifdef __BIG_ENDIAN
1578 fb_swap |= R600_D1GRPH_SWAP_ENDIAN_32BIT;
1579#endif
1580 break;
1581 default:
1582 DRM_ERROR("Unsupported screen format %s\n",
1583 drm_get_format_name(target_fb->format->format, &format_name));
1584 return -EINVAL;
1585 }
1586
1587 if (rdev->family >= CHIP_R600) {
1588 if (tiling_flags & RADEON_TILING_MACRO)
1589 fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
1590 else if (tiling_flags & RADEON_TILING_MICRO)
1591 fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
1592 } else {
1593 if (tiling_flags & RADEON_TILING_MACRO)
1594 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;
1595
1596 if (tiling_flags & RADEON_TILING_MICRO)
1597 fb_format |= AVIVO_D1GRPH_TILED;
1598 }
1599
1600 if (radeon_crtc->crtc_id == 0)
1601 WREG32(AVIVO_D1VGA_CONTROL, 0);
1602 else
1603 WREG32(AVIVO_D2VGA_CONTROL, 0);
1604
1605
1606
1607
1608 WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, 0);
1609
1610 if (rdev->family >= CHIP_RV770) {
1611 if (radeon_crtc->crtc_id) {
1612 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1613 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1614 } else {
1615 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1616 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1617 }
1618 }
1619 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1620 (u32) fb_location);
1621 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
1622 radeon_crtc->crtc_offset, (u32) fb_location);
1623 WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1624 if (rdev->family >= CHIP_R600)
1625 WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1626
1627
1628 WREG32_P(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset,
1629 (bypass_lut ? AVIVO_LUT_10BIT_BYPASS_EN : 0), ~AVIVO_LUT_10BIT_BYPASS_EN);
1630
1631 if (bypass_lut)
1632 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1633
1634 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1635 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1636 WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
1637 WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1638 WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1639 WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1640
1641 fb_pitch_pixels = target_fb->pitches[0] / target_fb->format->cpp[0];
1642 WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1643 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1644
1645 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1646 target_fb->height);
1647 x &= ~3;
1648 y &= ~1;
1649 WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
1650 (x << 16) | y);
1651 viewport_w = crtc->mode.hdisplay;
1652 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1653 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1654 (viewport_w << 16) | viewport_h);
1655
1656
1657 WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 3);
1658
1659 if (!atomic && fb && fb != crtc->primary->fb) {
1660 rbo = gem_to_radeon_bo(fb->obj[0]);
1661 r = radeon_bo_reserve(rbo, false);
1662 if (unlikely(r != 0))
1663 return r;
1664 radeon_bo_unpin(rbo);
1665 radeon_bo_unreserve(rbo);
1666 }
1667
1668
1669 radeon_bandwidth_update(rdev);
1670
1671 return 0;
1672}
1673
1674int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1675 struct drm_framebuffer *old_fb)
1676{
1677 struct drm_device *dev = crtc->dev;
1678 struct radeon_device *rdev = dev->dev_private;
1679
1680 if (ASIC_IS_DCE4(rdev))
1681 return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0);
1682 else if (ASIC_IS_AVIVO(rdev))
1683 return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0);
1684 else
1685 return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0);
1686}
1687
1688int atombios_crtc_set_base_atomic(struct drm_crtc *crtc,
1689 struct drm_framebuffer *fb,
1690 int x, int y, enum mode_set_atomic state)
1691{
1692 struct drm_device *dev = crtc->dev;
1693 struct radeon_device *rdev = dev->dev_private;
1694
1695 if (ASIC_IS_DCE4(rdev))
1696 return dce4_crtc_do_set_base(crtc, fb, x, y, 1);
1697 else if (ASIC_IS_AVIVO(rdev))
1698 return avivo_crtc_do_set_base(crtc, fb, x, y, 1);
1699 else
1700 return radeon_crtc_do_set_base(crtc, fb, x, y, 1);
1701}
1702
1703
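/* Small register fixups for legacy (pre-AVIVO) crtcs driven via atom tables. */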
1704static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
1705{
1706 struct drm_device *dev = crtc->dev;
1707 struct radeon_device *rdev = dev->dev_private;
1708 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1709 u32 disp_merge_cntl;
1710
1711 switch (radeon_crtc->crtc_id) {
1712 case 0:
1713 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
1714 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
1715 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
1716 break;
1717 case 1:
1718 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
1719 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
1720 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
1721 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
1722 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
1723 break;
1724 }
1725}
1726
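/* radeon_get_pll_use_mask - return a bitmask of the PPLLs already assigned to
 * other crtcs, so the caller can pick a free one.
 */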
1734static u32 radeon_get_pll_use_mask(struct drm_crtc *crtc)
1735{
1736 struct drm_device *dev = crtc->dev;
1737 struct drm_crtc *test_crtc;
1738 struct radeon_crtc *test_radeon_crtc;
1739 u32 pll_in_use = 0;
1740
1741 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1742 if (crtc == test_crtc)
1743 continue;
1744
1745 test_radeon_crtc = to_radeon_crtc(test_crtc);
1746 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1747 pll_in_use |= (1 << test_radeon_crtc->pll_id);
1748 }
1749 return pll_in_use;
1750}
1751
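/* radeon_get_shared_dp_ppll - look for another crtc already driving a DP
 * output and return its PPLL id so the PLL can be shared; returns
 * ATOM_PPLL_INVALID if none is found.
 */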
1761static int radeon_get_shared_dp_ppll(struct drm_crtc *crtc)
1762{
1763 struct drm_device *dev = crtc->dev;
1764 struct radeon_device *rdev = dev->dev_private;
1765 struct drm_crtc *test_crtc;
1766 struct radeon_crtc *test_radeon_crtc;
1767
1768 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1769 if (crtc == test_crtc)
1770 continue;
1771 test_radeon_crtc = to_radeon_crtc(test_crtc);
1772 if (test_radeon_crtc->encoder &&
1773 ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
1774
1775 if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) &&
1776 test_radeon_crtc->pll_id == ATOM_PPLL2)
1777 continue;
1778
1779 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1780 return test_radeon_crtc->pll_id;
1781 }
1782 }
1783 return ATOM_PPLL_INVALID;
1784}
1785
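/* radeon_get_shared_nondp_ppll - look for another non-DP crtc that either
 * drives the same connector or uses an identical (adjusted) clock and spread
 * spectrum setting, and return its PPLL id so the PLL can be shared; returns
 * ATOM_PPLL_INVALID if no suitable crtc is found.
 */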
1795static int radeon_get_shared_nondp_ppll(struct drm_crtc *crtc)
1796{
1797 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1798 struct drm_device *dev = crtc->dev;
1799 struct radeon_device *rdev = dev->dev_private;
1800 struct drm_crtc *test_crtc;
1801 struct radeon_crtc *test_radeon_crtc;
1802 u32 adjusted_clock, test_adjusted_clock;
1803
1804 adjusted_clock = radeon_crtc->adjusted_clock;
1805
1806 if (adjusted_clock == 0)
1807 return ATOM_PPLL_INVALID;
1808
1809 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1810 if (crtc == test_crtc)
1811 continue;
1812 test_radeon_crtc = to_radeon_crtc(test_crtc);
1813 if (test_radeon_crtc->encoder &&
1814 !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
/* on DCE6.1, PPLL2 is tied to one UNIPHY link and must not be shared */
1816 if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) &&
1817 test_radeon_crtc->pll_id == ATOM_PPLL2)
1818 continue;
1819
1820 if (test_radeon_crtc->connector == radeon_crtc->connector) {
/* another crtc is already driving this connector; reuse its PLL */
1822 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1823 return test_radeon_crtc->pll_id;
1824 }
/* otherwise only share when the mode clock, adjusted clock and ss settings match */
1826 test_adjusted_clock = test_radeon_crtc->adjusted_clock;
1827 if ((crtc->mode.clock == test_crtc->mode.clock) &&
1828 (adjusted_clock == test_adjusted_clock) &&
1829 (radeon_crtc->ss_enabled == test_radeon_crtc->ss_enabled) &&
1830 (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID))
1831 return test_radeon_crtc->pll_id;
1832 }
1833 }
1834 return ATOM_PPLL_INVALID;
1835}

/*
 * radeon_atom_pick_pll - allocate a PPLL for this crtc.
 *
 * Selection is ASIC specific (see the branches below):
 *
 * DCE 8.x
 * - DP with an external reference clock needs no PPLL (ATOM_PPLL_INVALID)
 * - otherwise try to share a PPLL with another DP or non-DP crtc
 * - KABINI/MULLINS fall back to a free PPLL2/PPLL1; other DCE8 parts
 *   may also use PPLL0
 *
 * DCE 6.1
 * - UNIPHY link A always uses PPLL2
 * - the other encoders share where possible, then fall back to PPLL0/PPLL1
 *
 * DCE 4.1
 * - no PPLL sharing; pick a free PPLL1/PPLL2
 *
 * DCE 4.0/5.0/6.0
 * - DP can use the external clock (no PPLL), PPLL0 (DCE6.0) or DCPLL (DCE5)
 * - otherwise share where possible, then fall back to PPLL1/PPLL2
 *
 * Pre-DCE4
 * - the crtc to PLL mapping is fixed; the crtc id doubles as the PLL id
 *
 * Returning ATOM_PPLL_INVALID means no PPLL will be programmed for this crtc.
 */
1874static int radeon_atom_pick_pll(struct drm_crtc *crtc)
1875{
1876 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1877 struct drm_device *dev = crtc->dev;
1878 struct radeon_device *rdev = dev->dev_private;
1879 struct radeon_encoder *radeon_encoder =
1880 to_radeon_encoder(radeon_crtc->encoder);
1881 u32 pll_in_use;
1882 int pll;
1883
1884 if (ASIC_IS_DCE8(rdev)) {
1885 if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
1886 if (rdev->clock.dp_extclk)
/* DP uses the external reference clock; no PPLL needs to be programmed */
1888 return ATOM_PPLL_INVALID;
1889 else {
/* try to reuse the PPLL of another DP crtc */
1891 pll = radeon_get_shared_dp_ppll(crtc);
1892 if (pll != ATOM_PPLL_INVALID)
1893 return pll;
1894 }
1895 } else {
/* non-DP: try to share a PPLL with a crtc using the same clock */
1897 pll = radeon_get_shared_nondp_ppll(crtc);
1898 if (pll != ATOM_PPLL_INVALID)
1899 return pll;
1900 }
/* no shared PLL found; pick a free one */
1902 if ((rdev->family == CHIP_KABINI) ||
1903 (rdev->family == CHIP_MULLINS)) {
/* KABINI/MULLINS: pick whichever of PPLL2/PPLL1 is free */
1905 pll_in_use = radeon_get_pll_use_mask(crtc);
1906 if (!(pll_in_use & (1 << ATOM_PPLL2)))
1907 return ATOM_PPLL2;
1908 if (!(pll_in_use & (1 << ATOM_PPLL1)))
1909 return ATOM_PPLL1;
1910 DRM_ERROR("unable to allocate a PPLL\n");
1911 return ATOM_PPLL_INVALID;
1912 } else {
/* other DCE8 parts: pick whichever of PPLL2/PPLL1/PPLL0 is free */
1914 pll_in_use = radeon_get_pll_use_mask(crtc);
1915 if (!(pll_in_use & (1 << ATOM_PPLL2)))
1916 return ATOM_PPLL2;
1917 if (!(pll_in_use & (1 << ATOM_PPLL1)))
1918 return ATOM_PPLL1;
1919 if (!(pll_in_use & (1 << ATOM_PPLL0)))
1920 return ATOM_PPLL0;
1921 DRM_ERROR("unable to allocate a PPLL\n");
1922 return ATOM_PPLL_INVALID;
1923 }
1924 } else if (ASIC_IS_DCE61(rdev)) {
1925 struct radeon_encoder_atom_dig *dig =
1926 radeon_encoder->enc_priv;
1927
1928 if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY) &&
1929 (dig->linkb == false))
/* UNIPHY link A gets the dedicated PPLL2 */
1931 return ATOM_PPLL2;
1932 else if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
/* DP on the remaining encoders */
1934 if (rdev->clock.dp_extclk)
/* DP uses the external reference clock; no PPLL needs to be programmed */
1936 return ATOM_PPLL_INVALID;
1937 else {
/* try to reuse the PPLL of another DP crtc */
1939 pll = radeon_get_shared_dp_ppll(crtc);
1940 if (pll != ATOM_PPLL_INVALID)
1941 return pll;
1942 }
1943 } else {
/* non-DP: try to share a PPLL with a crtc using the same clock */
1945 pll = radeon_get_shared_nondp_ppll(crtc);
1946 if (pll != ATOM_PPLL_INVALID)
1947 return pll;
1948 }
/* no shared PLL found; pick a free one */
1950 pll_in_use = radeon_get_pll_use_mask(crtc);
1951 if (!(pll_in_use & (1 << ATOM_PPLL0)))
1952 return ATOM_PPLL0;
1953 if (!(pll_in_use & (1 << ATOM_PPLL1)))
1954 return ATOM_PPLL1;
1955 DRM_ERROR("unable to allocate a PPLL\n");
1956 return ATOM_PPLL_INVALID;
1957 } else if (ASIC_IS_DCE41(rdev)) {
/* DCE4.1: no PPLL sharing; pick a free PPLL below */
1959 if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
1960 if (rdev->clock.dp_extclk)
/* DP uses the external reference clock; no PPLL needs to be programmed */
1962 return ATOM_PPLL_INVALID;
1963 }
1964 pll_in_use = radeon_get_pll_use_mask(crtc);
1965 if (!(pll_in_use & (1 << ATOM_PPLL1)))
1966 return ATOM_PPLL1;
1967 if (!(pll_in_use & (1 << ATOM_PPLL2)))
1968 return ATOM_PPLL2;
1969 DRM_ERROR("unable to allocate a PPLL\n");
1970 return ATOM_PPLL_INVALID;
1971 } else if (ASIC_IS_DCE4(rdev)) {
/*
 * In DP mode the reference clock can come from a PPLL, the DCPLL or an
 * external clock depending on the ASIC:
 *   DCE4: shared PPLL or external clock
 *   DCE5: DCPLL or external clock
 *   DCE6.0: PPLL0 or external clock
 * When the external clock is used, no PPLL is programmed at all
 * (ATOM_PPLL_INVALID).
 */
1982 if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
1983 if (rdev->clock.dp_extclk)
/* DP uses the external reference clock; no PPLL needs to be programmed */
1985 return ATOM_PPLL_INVALID;
1986 else if (ASIC_IS_DCE6(rdev))
/* on DCE6.0, DP always uses PPLL0 */
1988 return ATOM_PPLL0;
1989 else if (ASIC_IS_DCE5(rdev))
/* on DCE5, DP always uses the DCPLL */
1991 return ATOM_DCPLL;
1992 else {
/* try to reuse the PPLL of another DP crtc */
1994 pll = radeon_get_shared_dp_ppll(crtc);
1995 if (pll != ATOM_PPLL_INVALID)
1996 return pll;
1997 }
1998 } else {
/* non-DP: try to share a PPLL with a crtc using the same clock */
2000 pll = radeon_get_shared_nondp_ppll(crtc);
2001 if (pll != ATOM_PPLL_INVALID)
2002 return pll;
2003 }
/* no shared PLL found; pick whichever of PPLL1/PPLL2 is free */
2005 pll_in_use = radeon_get_pll_use_mask(crtc);
2006 if (!(pll_in_use & (1 << ATOM_PPLL1)))
2007 return ATOM_PPLL1;
2008 if (!(pll_in_use & (1 << ATOM_PPLL2)))
2009 return ATOM_PPLL2;
2010 DRM_ERROR("unable to allocate a PPLL\n");
2011 return ATOM_PPLL_INVALID;
2012 } else {
/*
 * Pre-DCE4 parts: the crtc to PLL mapping is fixed, so the crtc id
 * doubles as the PLL id.
 */
2028 return radeon_crtc->crtc_id;
2029 }
2030}
2031
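/*
 * Program the display engine PLL to the default display clock at init time.
 * On DCE4 parts, spread spectrum on the DCPLL is disabled while the clock
 * is changed and re-enabled afterwards.
 */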
2032void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev)
2033{
/* set the display engine clock to the default dispclk */
2035 if (ASIC_IS_DCE6(rdev))
2036 atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
2037 else if (ASIC_IS_DCE4(rdev)) {
2038 struct radeon_atom_ss ss;
2039 bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
2040 ASIC_INTERNAL_SS_ON_DCPLL,
2041 rdev->clock.default_dispclk);
2042 if (ss_enabled)
2043 atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, -1, &ss);
/* reprogram the display engine PLL while spread spectrum is disabled */
2045 atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
2046 if (ss_enabled)
2047 atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, -1, &ss);
2048 }
2049
2050}
2051
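/*
 * Full modeset for one crtc: program the pixel PLL, the crtc timing
 * (DTD timing on DCE4+, TV/CV handled specially on AVIVO, plus legacy
 * fixups on older parts), the scanout base, overscan and scaler setup,
 * and reset the cursor.
 */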
2052int atombios_crtc_mode_set(struct drm_crtc *crtc,
2053 struct drm_display_mode *mode,
2054 struct drm_display_mode *adjusted_mode,
2055 int x, int y, struct drm_framebuffer *old_fb)
2056{
2057 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
2058 struct drm_device *dev = crtc->dev;
2059 struct radeon_device *rdev = dev->dev_private;
2060 struct radeon_encoder *radeon_encoder =
2061 to_radeon_encoder(radeon_crtc->encoder);
2062 bool is_tvcv = false;
2063
2064 if (radeon_encoder->active_device &
2065 (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
2066 is_tvcv = true;
2067
2068 if (!radeon_crtc->adjusted_clock)
2069 return -EINVAL;
2070
2071 atombios_crtc_set_pll(crtc, adjusted_mode);
2072
2073 if (ASIC_IS_DCE4(rdev))
2074 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
2075 else if (ASIC_IS_AVIVO(rdev)) {
2076 if (is_tvcv)
2077 atombios_crtc_set_timing(crtc, adjusted_mode);
2078 else
2079 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
2080 } else {
2081 atombios_crtc_set_timing(crtc, adjusted_mode);
2082 if (radeon_crtc->crtc_id == 0)
2083 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
2084 radeon_legacy_atom_fixup(crtc);
2085 }
2086 atombios_crtc_set_base(crtc, x, y, old_fb);
2087 atombios_overscan_setup(crtc, mode, adjusted_mode);
2088 atombios_scaler_setup(crtc);
2089 radeon_cursor_reset(crtc);
/* remember the mode actually programmed into the hardware */
2091 radeon_crtc->hw_mode = *adjusted_mode;
2092
2093 return 0;
2094}
2095
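/*
 * mode_fixup: bind the encoder/connector driving this crtc, apply the
 * scaling mode fixup, prepare the PLL parameters and pick a PPLL.
 * Fails if no encoder is attached or if no usable PPLL can be found for
 * a non-DP output.
 */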
2096static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
2097 const struct drm_display_mode *mode,
2098 struct drm_display_mode *adjusted_mode)
2099{
2100 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
2101 struct drm_device *dev = crtc->dev;
2102 struct drm_encoder *encoder;
2103
/* find the encoder driving this crtc and cache its encoder/connector */
2105 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2106 if (encoder->crtc == crtc) {
2107 radeon_crtc->encoder = encoder;
2108 radeon_crtc->connector = radeon_get_connector_for_encoder(encoder);
2109 break;
2110 }
2111 }
2112 if ((radeon_crtc->encoder == NULL) || (radeon_crtc->connector == NULL)) {
2113 radeon_crtc->encoder = NULL;
2114 radeon_crtc->connector = NULL;
2115 return false;
2116 }
2117 if (radeon_crtc->encoder) {
2118 struct radeon_encoder *radeon_encoder =
2119 to_radeon_encoder(radeon_crtc->encoder);
2120
2121 radeon_crtc->output_csc = radeon_encoder->output_csc;
2122 }
2123 if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
2124 return false;
2125 if (!atombios_crtc_prepare_pll(crtc, adjusted_mode))
2126 return false;
/* pick a PLL for this crtc */
2128 radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);
/* a non-DP output must have a valid PPLL */
2130 if ((radeon_crtc->pll_id == ATOM_PPLL_INVALID) &&
2131 !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder)))
2132 return false;
2133
2134 return true;
2135}
2136
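/*
 * Prepare the crtc for a modeset: un-gate its power (DCE6+), lock the
 * crtc registers and switch it off while it is reprogrammed.
 */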
2137static void atombios_crtc_prepare(struct drm_crtc *crtc)
2138{
2139 struct drm_device *dev = crtc->dev;
2140 struct radeon_device *rdev = dev->dev_private;
2141
/* disable crtc power gating before programming it (DCE6+) */
2143 if (ASIC_IS_DCE6(rdev))
2144 atombios_powergate_crtc(crtc, ATOM_DISABLE);
2145
2146 atombios_lock_crtc(crtc, ATOM_ENABLE);
2147 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
2148}
2149
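/* Re-enable the crtc and release the register lock after a modeset. */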
2150static void atombios_crtc_commit(struct drm_crtc *crtc)
2151{
2152 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
2153 atombios_lock_crtc(crtc, ATOM_DISABLE);
2154}
2155
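/*
 * Disable the crtc: switch it off, unpin its framebuffer, disable the
 * primary surface, power gate it on DCE6+, and tear down its PLL unless
 * another enabled crtc is still using the same PLL.
 */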
2156static void atombios_crtc_disable(struct drm_crtc *crtc)
2157{
2158 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
2159 struct drm_device *dev = crtc->dev;
2160 struct radeon_device *rdev = dev->dev_private;
2161 struct radeon_atom_ss ss;
2162 int i;
2163
2164 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
2165 if (crtc->primary->fb) {
2166 int r;
2167 struct radeon_bo *rbo;
2168
2169 rbo = gem_to_radeon_bo(crtc->primary->fb->obj[0]);
2170 r = radeon_bo_reserve(rbo, false);
2171 if (unlikely(r))
2172 DRM_ERROR("failed to reserve rbo before unpin\n");
2173 else {
2174 radeon_bo_unpin(rbo);
2175 radeon_bo_unreserve(rbo);
2176 }
2177 }
/* disable the primary surface (GRPH) for this crtc */
2179 if (ASIC_IS_DCE4(rdev))
2180 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 0);
2181 else if (ASIC_IS_AVIVO(rdev))
2182 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 0);
2183
2184 if (ASIC_IS_DCE6(rdev))
2185 atombios_powergate_crtc(crtc, ATOM_ENABLE);
2186
2187 for (i = 0; i < rdev->num_crtc; i++) {
2188 if (rdev->mode_info.crtcs[i] &&
2189 rdev->mode_info.crtcs[i]->enabled &&
2190 i != radeon_crtc->crtc_id &&
2191 radeon_crtc->pll_id == rdev->mode_info.crtcs[i]->pll_id) {
/* another enabled crtc is still using this PLL; leave it running */
2195 goto done;
2196 }
2197 }
2198
2199 switch (radeon_crtc->pll_id) {
2200 case ATOM_PPLL1:
2201 case ATOM_PPLL2:
/* disable the PPLL */
2203 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
2204 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
2205 break;
2206 case ATOM_PPLL0:
/* only disable PPLL0 on the families where it is safe to do so */
2208 if ((rdev->family == CHIP_ARUBA) ||
2209 (rdev->family == CHIP_KAVERI) ||
2210 (rdev->family == CHIP_BONAIRE) ||
2211 (rdev->family == CHIP_HAWAII))
2212 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
2213 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
2214 break;
2215 default:
2216 break;
2217 }
2218done:
2219 radeon_crtc->pll_id = ATOM_PPLL_INVALID;
2220 radeon_crtc->adjusted_clock = 0;
2221 radeon_crtc->encoder = NULL;
2222 radeon_crtc->connector = NULL;
2223}
2224
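/* drm crtc helper callbacks for the atombios modeset path */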
2225static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
2226 .dpms = atombios_crtc_dpms,
2227 .mode_fixup = atombios_crtc_mode_fixup,
2228 .mode_set = atombios_crtc_mode_set,
2229 .mode_set_base = atombios_crtc_set_base,
2230 .mode_set_base_atomic = atombios_crtc_set_base_atomic,
2231 .prepare = atombios_crtc_prepare,
2232 .commit = atombios_crtc_commit,
2233 .disable = atombios_crtc_disable,
2234 .get_scanout_position = radeon_get_crtc_scanout_position,
2235};
2236
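/*
 * Per-crtc init: set the register offset for this crtc (per-crtc register
 * block on DCE4+, D2-D1 delta for crtc 1 on earlier parts), clear the PLL
 * and encoder/connector state, and register the atombios crtc helpers.
 */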
2237void radeon_atombios_init_crtc(struct drm_device *dev,
2238 struct radeon_crtc *radeon_crtc)
2239{
2240 struct radeon_device *rdev = dev->dev_private;
2241
2242 if (ASIC_IS_DCE4(rdev)) {
2243 switch (radeon_crtc->crtc_id) {
2244 case 0:
2245 default:
2246 radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
2247 break;
2248 case 1:
2249 radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
2250 break;
2251 case 2:
2252 radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
2253 break;
2254 case 3:
2255 radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
2256 break;
2257 case 4:
2258 radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
2259 break;
2260 case 5:
2261 radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
2262 break;
2263 }
2264 } else {
2265 if (radeon_crtc->crtc_id == 1)
2266 radeon_crtc->crtc_offset =
2267 AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
2268 else
2269 radeon_crtc->crtc_offset = 0;
2270 }
2271 radeon_crtc->pll_id = ATOM_PPLL_INVALID;
2272 radeon_crtc->adjusted_clock = 0;
2273 radeon_crtc->encoder = NULL;
2274 radeon_crtc->connector = NULL;
2275 drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
2276}
2277