#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"

#include "atom.h"
#include <asm/div64.h>

#include <linux/pm_runtime.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_edid.h>

#include <linux/gcd.h>

static void avivo_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUT_RW_MODE, 0);
	WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);

	WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(AVIVO_DC_LUT_30_COLOR,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}

	/* Only change bit 0 of LUT_SEL, other bits are set elsewhere */
	WREG32_P(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id, ~1);
}

static void dce4_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	for (i = 0; i < 256; i++) {
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}

static void dce5_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);

	WREG32(NI_INPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS) |
		NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS)));
	WREG32(NI_PRESCALE_GRPH_CONTROL + radeon_crtc->crtc_offset,
	       NI_GRPH_PRESCALE_BYPASS);
	WREG32(NI_PRESCALE_OVL_CONTROL + radeon_crtc->crtc_offset,
	       NI_OVL_PRESCALE_BYPASS);
	WREG32(NI_INPUT_GAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT) |
		NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT)));

	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	for (i = 0; i < 256; i++) {
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}

	WREG32(NI_DEGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS)));
	WREG32(NI_GAMUT_REMAP_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS) |
		NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS)));
	WREG32(NI_REGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS) |
		NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS)));
	WREG32(NI_OUTPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_OUTPUT_CSC_GRPH_MODE(radeon_crtc->output_csc) |
		NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS)));

	/* XXX match this to the depth of the crtc fmt block, move to modeset? */
	WREG32(0x6940 + radeon_crtc->crtc_offset, 0);
	if (ASIC_IS_DCE8(rdev)) {
		/* XXX this only needs to be programmed once per crtc at
		 * startup, not sure where the best place for it is
		 */
		WREG32(CIK_ALPHA_CONTROL + radeon_crtc->crtc_offset,
		       CIK_CURSOR_ALPHA_BLND_ENA);
	}
}

static void legacy_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;
	uint32_t dac2_cntl;

	dac2_cntl = RREG32(RADEON_DAC_CNTL2);
	if (radeon_crtc->crtc_id == 0)
		dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
	else
		dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
	WREG32(RADEON_DAC_CNTL2, dac2_cntl);

	WREG8(RADEON_PALETTE_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(RADEON_PALETTE_30_DATA,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}

void radeon_crtc_load_lut(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	if (!crtc->enabled)
		return;

	if (ASIC_IS_DCE5(rdev))
		dce5_crtc_load_lut(crtc);
	else if (ASIC_IS_DCE4(rdev))
		dce4_crtc_load_lut(crtc);
	else if (ASIC_IS_AVIVO(rdev))
		avivo_crtc_load_lut(crtc);
	else
		legacy_crtc_load_lut(crtc);
}

/** Sets the color ramps on behalf of fbcon */
void radeon_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green,
			      u16 blue, int regno)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	radeon_crtc->lut_r[regno] = red >> 6;
	radeon_crtc->lut_g[regno] = green >> 6;
	radeon_crtc->lut_b[regno] = blue >> 6;
}

/** Gets the color ramps on behalf of fbcon */
void radeon_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green,
			      u16 *blue, int regno)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	*red = radeon_crtc->lut_r[regno] << 6;
	*green = radeon_crtc->lut_g[regno] << 6;
	*blue = radeon_crtc->lut_b[regno] << 6;
}

static void radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
				  u16 *blue, uint32_t start, uint32_t size)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	int end = (start + size > 256) ? 256 : start + size, i;

	/* userspace palettes are always correct as is */
	for (i = start; i < end; i++) {
		radeon_crtc->lut_r[i] = red[i] >> 6;
		radeon_crtc->lut_g[i] = green[i] >> 6;
		radeon_crtc->lut_b[i] = blue[i] >> 6;
	}
	radeon_crtc_load_lut(crtc);
}

static void radeon_crtc_destroy(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	drm_crtc_cleanup(crtc);
	destroy_workqueue(radeon_crtc->flip_queue);
	kfree(radeon_crtc);
}

/**
 * radeon_unpin_work_func - unpin old buffer object
 *
 * @__work - kernel work item
 *
 * Unpin the old frame buffer object outside of the interrupt handler
 */
static void radeon_unpin_work_func(struct work_struct *__work)
{
	struct radeon_flip_work *work =
		container_of(__work, struct radeon_flip_work, unpin_work);
	int r;

	/* unpin of the old buffer */
	r = radeon_bo_reserve(work->old_rbo, false);
	if (likely(r == 0)) {
		r = radeon_bo_unpin(work->old_rbo);
		if (unlikely(r != 0)) {
			DRM_ERROR("failed to unpin buffer after flip\n");
		}
		radeon_bo_unreserve(work->old_rbo);
	} else
		DRM_ERROR("failed to reserve buffer after flip\n");

	drm_gem_object_unreference_unlocked(&work->old_rbo->gem_base);
	kfree(work);
}

void radeon_crtc_handle_vblank(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	unsigned long flags;
	u32 update_pending;
	int vpos, hpos;

	/* can happen during initialization */
	if (radeon_crtc == NULL)
		return;

	/* Skip the pageflip completion check below (based on polling) on
	 * asics which reliably support hw pageflip completion irqs. pflip
	 * irqs are a reliable and race-free method of handling pageflip
	 * completion detection. A use_pflipirq module parameter < 2 allows
	 * to override this in case of asics with faulty pflip irqs.
	 * A module parameter of 0 would only use this polling based path,
	 * a parameter of 1 would use pflip irq only as a backup to this
	 * path.
	 */
	if ((radeon_use_pflipirq == 2) && ASIC_IS_DCE4(rdev))
		return;

	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
	if (radeon_crtc->flip_status != RADEON_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("radeon_crtc->flip_status = %d != "
				 "RADEON_FLIP_SUBMITTED(%d)\n",
				 radeon_crtc->flip_status,
				 RADEON_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}

	update_pending = radeon_page_flip_pending(rdev, crtc_id);

	/* Has the pageflip already completed in crtc, or is it certain
	 * to complete in this vblank?
	 */
	if (update_pending &&
	    (DRM_SCANOUTPOS_VALID & radeon_get_crtc_scanoutpos(rdev->ddev, crtc_id, 0,
							       &vpos, &hpos, NULL, NULL)) &&
	    ((vpos >= (99 * rdev->mode_info.crtcs[crtc_id]->base.hwmode.crtc_vdisplay)/100) ||
	     (vpos < 0 && !ASIC_IS_AVIVO(rdev)))) {
		/* crtc didn't flip in this target vblank interval,
		 * but flip is pending in crtc. Based on the current
		 * scanout position we know that the current frame is
		 * (nearly) complete and the flip will (likely)
		 * complete before the start of the next frame.
		 */
		update_pending = 0;
	}
	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
	if (!update_pending)
		radeon_crtc_handle_flip(rdev, crtc_id);
}

/**
 * radeon_crtc_handle_flip - page flip completed
 *
 * @rdev: radeon device pointer
 * @crtc_id: crtc number this event is for
 *
 * Called when we are sure that a page flip for this crtc is completed.
 */
void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	struct radeon_flip_work *work;
	unsigned long flags;

	/* this can happen at init */
	if (radeon_crtc == NULL)
		return;

	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
	work = radeon_crtc->flip_work;
	if (radeon_crtc->flip_status != RADEON_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("radeon_crtc->flip_status = %d != "
				 "RADEON_FLIP_SUBMITTED(%d)\n",
				 radeon_crtc->flip_status,
				 RADEON_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}

	/* Pageflip completed. Clean up. */
	radeon_crtc->flip_status = RADEON_FLIP_NONE;
	radeon_crtc->flip_work = NULL;

	/* wakeup userspace */
	if (work->event)
		drm_send_vblank_event(rdev->ddev, crtc_id, work->event);

	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);

	drm_vblank_put(rdev->ddev, radeon_crtc->crtc_id);
	radeon_irq_kms_pflip_irq_put(rdev, work->crtc_id);
	queue_work(radeon_crtc->flip_queue, &work->unpin_work);
}

/**
 * radeon_flip_work_func - page flip framebuffer
 *
 * @__work - kernel work item
 *
 * Wait for the buffer object to become idle and do the actual page flip
 */
static void radeon_flip_work_func(struct work_struct *__work)
{
	struct radeon_flip_work *work =
		container_of(__work, struct radeon_flip_work, flip_work);
	struct radeon_device *rdev = work->rdev;
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[work->crtc_id];

	struct drm_crtc *crtc = &radeon_crtc->base;
	unsigned long flags;
	int r;

	down_read(&rdev->exclusive_lock);
	if (work->fence) {
		struct radeon_fence *fence;

		fence = to_radeon_fence(work->fence);
		if (fence && fence->rdev == rdev) {
			r = radeon_fence_wait(fence, false);
			if (r == -EDEADLK) {
				up_read(&rdev->exclusive_lock);
				do {
					r = radeon_gpu_reset(rdev);
				} while (r == -EAGAIN);
				down_read(&rdev->exclusive_lock);
			}
		} else
			r = fence_wait(work->fence, false);

		if (r)
			DRM_ERROR("failed to wait on page flip fence (%d)!\n", r);

		/* We continue with the page flip even if we failed to wait on
		 * the fence, otherwise the DRM core and userspace will be
		 * confused about which BO the CRTC is scanning out
		 */

		fence_put(work->fence);
		work->fence = NULL;
	}

	/* We borrow the event spin lock for protecting flip_status */
	spin_lock_irqsave(&crtc->dev->event_lock, flags);

	/* set the proper interrupt */
	radeon_irq_kms_pflip_irq_get(rdev, radeon_crtc->crtc_id);

	/* do the flip (mmio) */
	radeon_page_flip(rdev, radeon_crtc->crtc_id, work->base);

	radeon_crtc->flip_status = RADEON_FLIP_SUBMITTED;
	spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
	up_read(&rdev->exclusive_lock);
}
446
447static int radeon_crtc_page_flip(struct drm_crtc *crtc,
448 struct drm_framebuffer *fb,
449 struct drm_pending_vblank_event *event,
450 uint32_t page_flip_flags)
451{
452 struct drm_device *dev = crtc->dev;
453 struct radeon_device *rdev = dev->dev_private;
454 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
455 struct radeon_framebuffer *old_radeon_fb;
456 struct radeon_framebuffer *new_radeon_fb;
457 struct drm_gem_object *obj;
458 struct radeon_flip_work *work;
459 struct radeon_bo *new_rbo;
460 uint32_t tiling_flags, pitch_pixels;
461 uint64_t base;
462 unsigned long flags;
463 int r;
464
465 work = kzalloc(sizeof *work, GFP_KERNEL);
466 if (work == NULL)
467 return -ENOMEM;
468
469 INIT_WORK(&work->flip_work, radeon_flip_work_func);
470 INIT_WORK(&work->unpin_work, radeon_unpin_work_func);
471
472 work->rdev = rdev;
473 work->crtc_id = radeon_crtc->crtc_id;
474 work->event = event;
475
	/* schedule unpin of the old buffer */
477 old_radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
478 obj = old_radeon_fb->obj;
479
	/* take a reference to the old object */
481 drm_gem_object_reference(obj);
482 work->old_rbo = gem_to_radeon_bo(obj);
483
484 new_radeon_fb = to_radeon_framebuffer(fb);
485 obj = new_radeon_fb->obj;
486 new_rbo = gem_to_radeon_bo(obj);
487
	/* pin the new buffer */
489 DRM_DEBUG_DRIVER("flip-ioctl() cur_rbo = %p, new_rbo = %p\n",
490 work->old_rbo, new_rbo);
491
492 r = radeon_bo_reserve(new_rbo, false);
493 if (unlikely(r != 0)) {
494 DRM_ERROR("failed to reserve new rbo buffer before flip\n");
495 goto cleanup;
496 }
497
498 r = radeon_bo_pin_restricted(new_rbo, RADEON_GEM_DOMAIN_VRAM,
499 ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, &base);
500 if (unlikely(r != 0)) {
501 radeon_bo_unreserve(new_rbo);
502 r = -EINVAL;
503 DRM_ERROR("failed to pin new rbo buffer before flip\n");
504 goto cleanup;
505 }
506 work->fence = fence_get(reservation_object_get_excl(new_rbo->tbo.resv));
507 radeon_bo_get_tiling_flags(new_rbo, &tiling_flags, NULL);
508 radeon_bo_unreserve(new_rbo);
509
510 if (!ASIC_IS_AVIVO(rdev)) {
		/* crtc offset is from display base addr not FB location */
512 base -= radeon_crtc->legacy_display_base_addr;
513 pitch_pixels = fb->pitches[0] / (fb->bits_per_pixel / 8);
514
515 if (tiling_flags & RADEON_TILING_MACRO) {
516 if (ASIC_IS_R300(rdev)) {
517 base &= ~0x7ff;
518 } else {
519 int byteshift = fb->bits_per_pixel >> 4;
520 int tile_addr = (((crtc->y >> 3) * pitch_pixels + crtc->x) >> (8 - byteshift)) << 11;
521 base += tile_addr + ((crtc->x << byteshift) % 256) + ((crtc->y % 8) << 8);
522 }
523 } else {
524 int offset = crtc->y * pitch_pixels + crtc->x;
525 switch (fb->bits_per_pixel) {
526 case 8:
527 default:
528 offset *= 1;
529 break;
530 case 15:
531 case 16:
532 offset *= 2;
533 break;
534 case 24:
535 offset *= 3;
536 break;
537 case 32:
538 offset *= 4;
539 break;
540 }
541 base += offset;
542 }
543 base &= ~7;
544 }
545 work->base = base;
546
547 r = drm_vblank_get(crtc->dev, radeon_crtc->crtc_id);
548 if (r) {
549 DRM_ERROR("failed to get vblank before flip\n");
550 goto pflip_cleanup;
551 }
552
	/* We borrow the event spin lock for protecting flip_work */
554 spin_lock_irqsave(&crtc->dev->event_lock, flags);
555
556 if (radeon_crtc->flip_status != RADEON_FLIP_NONE) {
557 DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
558 spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
559 r = -EBUSY;
560 goto vblank_cleanup;
561 }
562 radeon_crtc->flip_status = RADEON_FLIP_PENDING;
563 radeon_crtc->flip_work = work;
564
	/* update crtc fb */
566 crtc->primary->fb = fb;
567
568 spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
569
570 queue_work(radeon_crtc->flip_queue, &work->flip_work);
571 return 0;
572
573vblank_cleanup:
574 drm_vblank_put(crtc->dev, radeon_crtc->crtc_id);
575
576pflip_cleanup:
577 if (unlikely(radeon_bo_reserve(new_rbo, false) != 0)) {
578 DRM_ERROR("failed to reserve new rbo in error path\n");
579 goto cleanup;
580 }
581 if (unlikely(radeon_bo_unpin(new_rbo) != 0)) {
582 DRM_ERROR("failed to unpin new rbo in error path\n");
583 }
584 radeon_bo_unreserve(new_rbo);
585
586cleanup:
587 drm_gem_object_unreference_unlocked(&work->old_rbo->gem_base);
588 fence_put(work->fence);
589 kfree(work);
590 return r;
591}
592
593static int
594radeon_crtc_set_config(struct drm_mode_set *set)
595{
596 struct drm_device *dev;
597 struct radeon_device *rdev;
598 struct drm_crtc *crtc;
599 bool active = false;
600 int ret;
601
602 if (!set || !set->crtc)
603 return -EINVAL;
604
605 dev = set->crtc->dev;
606
607 ret = pm_runtime_get_sync(dev->dev);
608 if (ret < 0)
609 return ret;
610
611 ret = drm_crtc_helper_set_config(set);
612
613 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head)
614 if (crtc->enabled)
615 active = true;
616
617 pm_runtime_mark_last_busy(dev->dev);
618
619 rdev = dev->dev_private;
	/* if we have active crtcs and we don't have a power ref,
	   take the current one */
622 if (active && !rdev->have_disp_power_ref) {
623 rdev->have_disp_power_ref = true;
624 return ret;
625 }
	/* if we have no active crtcs, then drop the power ref
	   we got before */
628 if (!active && rdev->have_disp_power_ref) {
629 pm_runtime_put_autosuspend(dev->dev);
630 rdev->have_disp_power_ref = false;
631 }
632
	/* drop the power reference we got coming in here */
634 pm_runtime_put_autosuspend(dev->dev);
635 return ret;
636}
637static const struct drm_crtc_funcs radeon_crtc_funcs = {
638 .cursor_set2 = radeon_crtc_cursor_set2,
639 .cursor_move = radeon_crtc_cursor_move,
640 .gamma_set = radeon_crtc_gamma_set,
641 .set_config = radeon_crtc_set_config,
642 .destroy = radeon_crtc_destroy,
643 .page_flip = radeon_crtc_page_flip,
644};
645
646static void radeon_crtc_init(struct drm_device *dev, int index)
647{
648 struct radeon_device *rdev = dev->dev_private;
649 struct radeon_crtc *radeon_crtc;
650 int i;
651
652 radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
653 if (radeon_crtc == NULL)
654 return;
655
656 drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);
657
658 drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
659 radeon_crtc->crtc_id = index;
660 radeon_crtc->flip_queue = create_singlethread_workqueue("radeon-crtc");
661 rdev->mode_info.crtcs[index] = radeon_crtc;
662
663 if (rdev->family >= CHIP_BONAIRE) {
664 radeon_crtc->max_cursor_width = CIK_CURSOR_WIDTH;
665 radeon_crtc->max_cursor_height = CIK_CURSOR_HEIGHT;
666 } else {
667 radeon_crtc->max_cursor_width = CURSOR_WIDTH;
668 radeon_crtc->max_cursor_height = CURSOR_HEIGHT;
669 }
670 dev->mode_config.cursor_width = radeon_crtc->max_cursor_width;
671 dev->mode_config.cursor_height = radeon_crtc->max_cursor_height;
672
673#if 0
674 radeon_crtc->mode_set.crtc = &radeon_crtc->base;
675 radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
676 radeon_crtc->mode_set.num_connectors = 0;
677#endif
678
679 for (i = 0; i < 256; i++) {
680 radeon_crtc->lut_r[i] = i << 2;
681 radeon_crtc->lut_g[i] = i << 2;
682 radeon_crtc->lut_b[i] = i << 2;
683 }
684
685 if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
686 radeon_atombios_init_crtc(dev, radeon_crtc);
687 else
688 radeon_legacy_init_crtc(dev, radeon_crtc);
689}
690
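/* Human-readable names for the ATOM encoder object ids, indexed by
 * radeon_encoder->encoder_id; used by radeon_print_display_setup() below.
 */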
691static const char *encoder_names[38] = {
692 "NONE",
693 "INTERNAL_LVDS",
694 "INTERNAL_TMDS1",
695 "INTERNAL_TMDS2",
696 "INTERNAL_DAC1",
697 "INTERNAL_DAC2",
698 "INTERNAL_SDVOA",
699 "INTERNAL_SDVOB",
700 "SI170B",
701 "CH7303",
702 "CH7301",
703 "INTERNAL_DVO1",
704 "EXTERNAL_SDVOA",
705 "EXTERNAL_SDVOB",
706 "TITFP513",
707 "INTERNAL_LVTM1",
708 "VT1623",
709 "HDMI_SI1930",
710 "HDMI_INTERNAL",
711 "INTERNAL_KLDSCP_TMDS1",
712 "INTERNAL_KLDSCP_DVO1",
713 "INTERNAL_KLDSCP_DAC1",
714 "INTERNAL_KLDSCP_DAC2",
715 "SI178",
716 "MVPU_FPGA",
717 "INTERNAL_DDI",
718 "VT1625",
719 "HDMI_SI1932",
720 "DP_AN9801",
721 "DP_DP501",
722 "INTERNAL_UNIPHY",
723 "INTERNAL_KLDSCP_LVTMA",
724 "INTERNAL_UNIPHY1",
725 "INTERNAL_UNIPHY2",
726 "NUTMEG",
727 "TRAVIS",
728 "INTERNAL_VCE",
729 "INTERNAL_UNIPHY3",
730};
731
732static const char *hpd_names[6] = {
733 "HPD1",
734 "HPD2",
735 "HPD3",
736 "HPD4",
737 "HPD5",
738 "HPD6",
739};
740
741static void radeon_print_display_setup(struct drm_device *dev)
742{
743 struct drm_connector *connector;
744 struct radeon_connector *radeon_connector;
745 struct drm_encoder *encoder;
746 struct radeon_encoder *radeon_encoder;
747 uint32_t devices;
748 int i = 0;
749
750 DRM_INFO("Radeon Display Connectors\n");
751 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
752 radeon_connector = to_radeon_connector(connector);
753 DRM_INFO("Connector %d:\n", i);
754 DRM_INFO(" %s\n", connector->name);
755 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
756 DRM_INFO(" %s\n", hpd_names[radeon_connector->hpd.hpd]);
757 if (radeon_connector->ddc_bus) {
758 DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
759 radeon_connector->ddc_bus->rec.mask_clk_reg,
760 radeon_connector->ddc_bus->rec.mask_data_reg,
761 radeon_connector->ddc_bus->rec.a_clk_reg,
762 radeon_connector->ddc_bus->rec.a_data_reg,
763 radeon_connector->ddc_bus->rec.en_clk_reg,
764 radeon_connector->ddc_bus->rec.en_data_reg,
765 radeon_connector->ddc_bus->rec.y_clk_reg,
766 radeon_connector->ddc_bus->rec.y_data_reg);
767 if (radeon_connector->router.ddc_valid)
768 DRM_INFO(" DDC Router 0x%x/0x%x\n",
769 radeon_connector->router.ddc_mux_control_pin,
770 radeon_connector->router.ddc_mux_state);
771 if (radeon_connector->router.cd_valid)
772 DRM_INFO(" Clock/Data Router 0x%x/0x%x\n",
773 radeon_connector->router.cd_mux_control_pin,
774 radeon_connector->router.cd_mux_state);
775 } else {
776 if (connector->connector_type == DRM_MODE_CONNECTOR_VGA ||
777 connector->connector_type == DRM_MODE_CONNECTOR_DVII ||
778 connector->connector_type == DRM_MODE_CONNECTOR_DVID ||
779 connector->connector_type == DRM_MODE_CONNECTOR_DVIA ||
780 connector->connector_type == DRM_MODE_CONNECTOR_HDMIA ||
781 connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)
782 DRM_INFO(" DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
783 }
784 DRM_INFO(" Encoders:\n");
785 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
786 radeon_encoder = to_radeon_encoder(encoder);
787 devices = radeon_encoder->devices & radeon_connector->devices;
788 if (devices) {
789 if (devices & ATOM_DEVICE_CRT1_SUPPORT)
790 DRM_INFO(" CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]);
791 if (devices & ATOM_DEVICE_CRT2_SUPPORT)
792 DRM_INFO(" CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]);
793 if (devices & ATOM_DEVICE_LCD1_SUPPORT)
794 DRM_INFO(" LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]);
795 if (devices & ATOM_DEVICE_DFP1_SUPPORT)
796 DRM_INFO(" DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]);
797 if (devices & ATOM_DEVICE_DFP2_SUPPORT)
798 DRM_INFO(" DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]);
799 if (devices & ATOM_DEVICE_DFP3_SUPPORT)
800 DRM_INFO(" DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]);
801 if (devices & ATOM_DEVICE_DFP4_SUPPORT)
802 DRM_INFO(" DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]);
803 if (devices & ATOM_DEVICE_DFP5_SUPPORT)
804 DRM_INFO(" DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]);
805 if (devices & ATOM_DEVICE_DFP6_SUPPORT)
806 DRM_INFO(" DFP6: %s\n", encoder_names[radeon_encoder->encoder_id]);
807 if (devices & ATOM_DEVICE_TV1_SUPPORT)
808 DRM_INFO(" TV1: %s\n", encoder_names[radeon_encoder->encoder_id]);
809 if (devices & ATOM_DEVICE_CV_SUPPORT)
810 DRM_INFO(" CV: %s\n", encoder_names[radeon_encoder->encoder_id]);
811 }
812 }
813 i++;
814 }
815}
816
817static bool radeon_setup_enc_conn(struct drm_device *dev)
818{
819 struct radeon_device *rdev = dev->dev_private;
820 bool ret = false;
821
822 if (rdev->bios) {
823 if (rdev->is_atom_bios) {
824 ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
825 if (ret == false)
826 ret = radeon_get_atom_connector_info_from_object_table(dev);
827 } else {
828 ret = radeon_get_legacy_connector_info_from_bios(dev);
829 if (ret == false)
830 ret = radeon_get_legacy_connector_info_from_table(dev);
831 }
832 } else {
833 if (!ASIC_IS_AVIVO(rdev))
834 ret = radeon_get_legacy_connector_info_from_table(dev);
835 }
836 if (ret) {
837 radeon_setup_encoder_clones(dev);
838 radeon_print_display_setup(dev);
839 }
840
841 return ret;
842}

/**
 * avivo_reduce_ratio - fractional number reduction
 *
 * @nom: nominator
 * @den: denominator
 * @nom_min: minimum value for nominator
 * @den_min: minimum value for denominator
 *
 * Find the greatest common divisor and apply it on both nominator and
 * denominator, then make sure the nominator and denominator are at least
 * larger than their minimum values.
 */
static void avivo_reduce_ratio(unsigned *nom, unsigned *den,
			       unsigned nom_min, unsigned den_min)
{
	unsigned tmp;

	/* reduce the numbers to a simpler ratio */
	tmp = gcd(*nom, *den);
	*nom /= tmp;
	*den /= tmp;

	/* make sure nominator is large enough */
	if (*nom < nom_min) {
		tmp = DIV_ROUND_UP(nom_min, *nom);
		*nom *= tmp;
		*den *= tmp;
	}

	/* make sure the denominator is large enough */
	if (*den < den_min) {
		tmp = DIV_ROUND_UP(den_min, *den);
		*nom *= tmp;
		*den *= tmp;
	}
}

/**
 * avivo_get_fb_ref_div - feedback and ref divider calculation
 *
 * @nom: nominator
 * @den: denominator
 * @post_div: post divider
 * @fb_div_max: feedback divider maximum
 * @ref_div_max: reference divider maximum
 * @fb_div: resulting feedback divider
 * @ref_div: resulting reference divider
 *
 * Calculate feedback and reference divider for a given post divider. Makes
 * sure we stay within the limits.
 */
static void avivo_get_fb_ref_div(unsigned nom, unsigned den, unsigned post_div,
				 unsigned fb_div_max, unsigned ref_div_max,
				 unsigned *fb_div, unsigned *ref_div)
{
	/* limit reference * post divider to a maximum */
	ref_div_max = max(min(100 / post_div, ref_div_max), 1u);

	/* get matching reference and feedback divider */
	*ref_div = min(max(DIV_ROUND_CLOSEST(den, post_div), 1u), ref_div_max);
	*fb_div = DIV_ROUND_CLOSEST(nom * *ref_div * post_div, den);

	/* limit fb divider to its maximum */
	if (*fb_div > fb_div_max) {
		*ref_div = DIV_ROUND_CLOSEST(*ref_div * fb_div_max, *fb_div);
		*fb_div = fb_div_max;
	}
}

/**
 * radeon_compute_pll_avivo - compute PLL parameters
 *
 * @pll: information about the PLL
 * @freq: requested frequency
 * @dot_clock_p: resulting pixel clock
 * @fb_div_p: resulting feedback divider
 * @frac_fb_div_p: fractional feedback divider
 * @ref_div_p: resulting reference divider
 * @post_div_p: resulting post divider
 *
 * Try to calculate the PLL parameters to generate the given frequency:
 * dot_clock = (ref_freq * feedback_div) / (ref_div * post_div)
 */
928void radeon_compute_pll_avivo(struct radeon_pll *pll,
929 u32 freq,
930 u32 *dot_clock_p,
931 u32 *fb_div_p,
932 u32 *frac_fb_div_p,
933 u32 *ref_div_p,
934 u32 *post_div_p)
935{
936 unsigned target_clock = pll->flags & RADEON_PLL_USE_FRAC_FB_DIV ?
937 freq : freq / 10;
938
939 unsigned fb_div_min, fb_div_max, fb_div;
940 unsigned post_div_min, post_div_max, post_div;
941 unsigned ref_div_min, ref_div_max, ref_div;
942 unsigned post_div_best, diff_best;
943 unsigned nom, den;
944
945
946 fb_div_min = pll->min_feedback_div;
947 fb_div_max = pll->max_feedback_div;
948
949 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
950 fb_div_min *= 10;
951 fb_div_max *= 10;
952 }
953
954
955 if (pll->flags & RADEON_PLL_USE_REF_DIV)
956 ref_div_min = pll->reference_div;
957 else
958 ref_div_min = pll->min_ref_div;
959
960 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV &&
961 pll->flags & RADEON_PLL_USE_REF_DIV)
962 ref_div_max = pll->reference_div;
963 else if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP)
964
965 ref_div_max = min(pll->max_ref_div, 7u);
966 else
967 ref_div_max = pll->max_ref_div;
968
969
970 if (pll->flags & RADEON_PLL_USE_POST_DIV) {
971 post_div_min = pll->post_div;
972 post_div_max = pll->post_div;
973 } else {
974 unsigned vco_min, vco_max;
975
976 if (pll->flags & RADEON_PLL_IS_LCD) {
977 vco_min = pll->lcd_pll_out_min;
978 vco_max = pll->lcd_pll_out_max;
979 } else {
980 vco_min = pll->pll_out_min;
981 vco_max = pll->pll_out_max;
982 }
983
984 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
985 vco_min *= 10;
986 vco_max *= 10;
987 }
988
989 post_div_min = vco_min / target_clock;
990 if ((target_clock * post_div_min) < vco_min)
991 ++post_div_min;
992 if (post_div_min < pll->min_post_div)
993 post_div_min = pll->min_post_div;
994
995 post_div_max = vco_max / target_clock;
996 if ((target_clock * post_div_max) > vco_max)
997 --post_div_max;
998 if (post_div_max > pll->max_post_div)
999 post_div_max = pll->max_post_div;
1000 }
1001
1002
1003 nom = target_clock;
1004 den = pll->reference_freq;
1005
1006
1007 avivo_reduce_ratio(&nom, &den, fb_div_min, post_div_min);
1008
1009
1010 if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP)
1011 post_div_best = post_div_min;
1012 else
1013 post_div_best = post_div_max;
1014 diff_best = ~0;
1015
1016 for (post_div = post_div_min; post_div <= post_div_max; ++post_div) {
1017 unsigned diff;
1018 avivo_get_fb_ref_div(nom, den, post_div, fb_div_max,
1019 ref_div_max, &fb_div, &ref_div);
1020 diff = abs(target_clock - (pll->reference_freq * fb_div) /
1021 (ref_div * post_div));
1022
1023 if (diff < diff_best || (diff == diff_best &&
1024 !(pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP))) {
1025
1026 post_div_best = post_div;
1027 diff_best = diff;
1028 }
1029 }
1030 post_div = post_div_best;
1031
1032
1033 avivo_get_fb_ref_div(nom, den, post_div, fb_div_max, ref_div_max,
1034 &fb_div, &ref_div);
1035
1036
1037
1038 avivo_reduce_ratio(&fb_div, &ref_div, fb_div_min, ref_div_min);
1039
1040
1041 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV && (fb_div % 10)) {
1042 fb_div_min = max(fb_div_min, (9 - (fb_div % 10)) * 20 + 50);
1043 if (fb_div < fb_div_min) {
1044 unsigned tmp = DIV_ROUND_UP(fb_div_min, fb_div);
1045 fb_div *= tmp;
1046 ref_div *= tmp;
1047 }
1048 }
1049
1050
1051 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
1052 *fb_div_p = fb_div / 10;
1053 *frac_fb_div_p = fb_div % 10;
1054 } else {
1055 *fb_div_p = fb_div;
1056 *frac_fb_div_p = 0;
1057 }
1058
1059 *dot_clock_p = ((pll->reference_freq * *fb_div_p * 10) +
1060 (pll->reference_freq * *frac_fb_div_p)) /
1061 (ref_div * post_div * 10);
1062 *ref_div_p = ref_div;
1063 *post_div_p = post_div;
1064
1065 DRM_DEBUG_KMS("%d - %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1066 freq, *dot_clock_p * 10, *fb_div_p, *frac_fb_div_p,
1067 ref_div, post_div);
1068}
1069
/* pre-avivo */
1071static inline uint32_t radeon_div(uint64_t n, uint32_t d)
1072{
1073 uint64_t mod;
1074
1075 n += d / 2;
1076
1077 mod = do_div(n, d);
1078 return n;
1079}
1080
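/* radeon_compute_pll_legacy - PLL divider search for pre-AVIVO asics.
 *
 * Brute-force search over post, reference and (fractional) feedback
 * dividers for the combination whose output clock is closest to the
 * requested frequency, while respecting the limits and preference
 * flags carried in @pll.
 */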
1081void radeon_compute_pll_legacy(struct radeon_pll *pll,
1082 uint64_t freq,
1083 uint32_t *dot_clock_p,
1084 uint32_t *fb_div_p,
1085 uint32_t *frac_fb_div_p,
1086 uint32_t *ref_div_p,
1087 uint32_t *post_div_p)
1088{
1089 uint32_t min_ref_div = pll->min_ref_div;
1090 uint32_t max_ref_div = pll->max_ref_div;
1091 uint32_t min_post_div = pll->min_post_div;
1092 uint32_t max_post_div = pll->max_post_div;
1093 uint32_t min_fractional_feed_div = 0;
1094 uint32_t max_fractional_feed_div = 0;
1095 uint32_t best_vco = pll->best_vco;
1096 uint32_t best_post_div = 1;
1097 uint32_t best_ref_div = 1;
1098 uint32_t best_feedback_div = 1;
1099 uint32_t best_frac_feedback_div = 0;
1100 uint32_t best_freq = -1;
1101 uint32_t best_error = 0xffffffff;
1102 uint32_t best_vco_diff = 1;
1103 uint32_t post_div;
1104 u32 pll_out_min, pll_out_max;
1105
1106 DRM_DEBUG_KMS("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
1107 freq = freq * 1000;
1108
1109 if (pll->flags & RADEON_PLL_IS_LCD) {
1110 pll_out_min = pll->lcd_pll_out_min;
1111 pll_out_max = pll->lcd_pll_out_max;
1112 } else {
1113 pll_out_min = pll->pll_out_min;
1114 pll_out_max = pll->pll_out_max;
1115 }
1116
1117 if (pll_out_min > 64800)
1118 pll_out_min = 64800;
1119
1120 if (pll->flags & RADEON_PLL_USE_REF_DIV)
1121 min_ref_div = max_ref_div = pll->reference_div;
1122 else {
1123 while (min_ref_div < max_ref_div-1) {
1124 uint32_t mid = (min_ref_div + max_ref_div) / 2;
1125 uint32_t pll_in = pll->reference_freq / mid;
1126 if (pll_in < pll->pll_in_min)
1127 max_ref_div = mid;
1128 else if (pll_in > pll->pll_in_max)
1129 min_ref_div = mid;
1130 else
1131 break;
1132 }
1133 }
1134
1135 if (pll->flags & RADEON_PLL_USE_POST_DIV)
1136 min_post_div = max_post_div = pll->post_div;
1137
1138 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
1139 min_fractional_feed_div = pll->min_frac_feedback_div;
1140 max_fractional_feed_div = pll->max_frac_feedback_div;
1141 }
1142
1143 for (post_div = max_post_div; post_div >= min_post_div; --post_div) {
1144 uint32_t ref_div;
1145
1146 if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
1147 continue;
1148
1149
1150 if (pll->flags & RADEON_PLL_LEGACY) {
1151 if ((post_div == 5) ||
1152 (post_div == 7) ||
1153 (post_div == 9) ||
1154 (post_div == 10) ||
1155 (post_div == 11) ||
1156 (post_div == 13) ||
1157 (post_div == 14) ||
1158 (post_div == 15))
1159 continue;
1160 }
1161
1162 for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) {
1163 uint32_t feedback_div, current_freq = 0, error, vco_diff;
1164 uint32_t pll_in = pll->reference_freq / ref_div;
1165 uint32_t min_feed_div = pll->min_feedback_div;
1166 uint32_t max_feed_div = pll->max_feedback_div + 1;
1167
1168 if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max)
1169 continue;
1170
1171 while (min_feed_div < max_feed_div) {
1172 uint32_t vco;
1173 uint32_t min_frac_feed_div = min_fractional_feed_div;
1174 uint32_t max_frac_feed_div = max_fractional_feed_div + 1;
1175 uint32_t frac_feedback_div;
1176 uint64_t tmp;
1177
1178 feedback_div = (min_feed_div + max_feed_div) / 2;
1179
1180 tmp = (uint64_t)pll->reference_freq * feedback_div;
1181 vco = radeon_div(tmp, ref_div);
1182
1183 if (vco < pll_out_min) {
1184 min_feed_div = feedback_div + 1;
1185 continue;
1186 } else if (vco > pll_out_max) {
1187 max_feed_div = feedback_div;
1188 continue;
1189 }
1190
1191 while (min_frac_feed_div < max_frac_feed_div) {
1192 frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2;
1193 tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div;
1194 tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
1195 current_freq = radeon_div(tmp, ref_div * post_div);
1196
1197 if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
1198 if (freq < current_freq)
1199 error = 0xffffffff;
1200 else
1201 error = freq - current_freq;
1202 } else
1203 error = abs(current_freq - freq);
1204 vco_diff = abs(vco - best_vco);
1205
1206 if ((best_vco == 0 && error < best_error) ||
1207 (best_vco != 0 &&
1208 ((best_error > 100 && error < best_error - 100) ||
1209 (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) {
1210 best_post_div = post_div;
1211 best_ref_div = ref_div;
1212 best_feedback_div = feedback_div;
1213 best_frac_feedback_div = frac_feedback_div;
1214 best_freq = current_freq;
1215 best_error = error;
1216 best_vco_diff = vco_diff;
1217 } else if (current_freq == freq) {
1218 if (best_freq == -1) {
1219 best_post_div = post_div;
1220 best_ref_div = ref_div;
1221 best_feedback_div = feedback_div;
1222 best_frac_feedback_div = frac_feedback_div;
1223 best_freq = current_freq;
1224 best_error = error;
1225 best_vco_diff = vco_diff;
1226 } else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
1227 ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
1228 ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
1229 ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
1230 ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
1231 ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
1232 best_post_div = post_div;
1233 best_ref_div = ref_div;
1234 best_feedback_div = feedback_div;
1235 best_frac_feedback_div = frac_feedback_div;
1236 best_freq = current_freq;
1237 best_error = error;
1238 best_vco_diff = vco_diff;
1239 }
1240 }
1241 if (current_freq < freq)
1242 min_frac_feed_div = frac_feedback_div + 1;
1243 else
1244 max_frac_feed_div = frac_feedback_div;
1245 }
1246 if (current_freq < freq)
1247 min_feed_div = feedback_div + 1;
1248 else
1249 max_feed_div = feedback_div;
1250 }
1251 }
1252 }
1253
1254 *dot_clock_p = best_freq / 10000;
1255 *fb_div_p = best_feedback_div;
1256 *frac_fb_div_p = best_frac_feedback_div;
1257 *ref_div_p = best_ref_div;
1258 *post_div_p = best_post_div;
1259 DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1260 (long long)freq,
1261 best_freq / 1000, best_feedback_div, best_frac_feedback_div,
1262 best_ref_div, best_post_div);
1263
1264}
1265
1266static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb)
1267{
1268 struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
1269
1270 if (radeon_fb->obj) {
1271 drm_gem_object_unreference_unlocked(radeon_fb->obj);
1272 }
1273 drm_framebuffer_cleanup(fb);
1274 kfree(radeon_fb);
1275}
1276
1277static int radeon_user_framebuffer_create_handle(struct drm_framebuffer *fb,
1278 struct drm_file *file_priv,
1279 unsigned int *handle)
1280{
1281 struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
1282
1283 return drm_gem_handle_create(file_priv, radeon_fb->obj, handle);
1284}
1285
1286static const struct drm_framebuffer_funcs radeon_fb_funcs = {
1287 .destroy = radeon_user_framebuffer_destroy,
1288 .create_handle = radeon_user_framebuffer_create_handle,
1289};
1290
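/* Fill in a radeon_framebuffer from a mode_fb_cmd2 and a GEM object and
 * register it with the DRM core; the caller keeps its reference to @obj
 * and must drop it itself on failure.
 */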
1291int
1292radeon_framebuffer_init(struct drm_device *dev,
1293 struct radeon_framebuffer *rfb,
1294 struct drm_mode_fb_cmd2 *mode_cmd,
1295 struct drm_gem_object *obj)
1296{
1297 int ret;
1298 rfb->obj = obj;
1299 drm_helper_mode_fill_fb_struct(&rfb->base, mode_cmd);
1300 ret = drm_framebuffer_init(dev, &rfb->base, &radeon_fb_funcs);
1301 if (ret) {
1302 rfb->obj = NULL;
1303 return ret;
1304 }
1305 return 0;
1306}
1307
1308static struct drm_framebuffer *
1309radeon_user_framebuffer_create(struct drm_device *dev,
1310 struct drm_file *file_priv,
1311 struct drm_mode_fb_cmd2 *mode_cmd)
1312{
1313 struct drm_gem_object *obj;
1314 struct radeon_framebuffer *radeon_fb;
1315 int ret;
1316
1317 obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
1318 if (obj == NULL) {
1319 dev_err(&dev->pdev->dev, "No GEM object associated to handle 0x%08X, "
1320 "can't create framebuffer\n", mode_cmd->handles[0]);
1321 return ERR_PTR(-ENOENT);
1322 }
1323
1324 radeon_fb = kzalloc(sizeof(*radeon_fb), GFP_KERNEL);
1325 if (radeon_fb == NULL) {
1326 drm_gem_object_unreference_unlocked(obj);
1327 return ERR_PTR(-ENOMEM);
1328 }
1329
1330 ret = radeon_framebuffer_init(dev, radeon_fb, mode_cmd, obj);
1331 if (ret) {
1332 kfree(radeon_fb);
1333 drm_gem_object_unreference_unlocked(obj);
1334 return ERR_PTR(ret);
1335 }
1336
1337 return &radeon_fb->base;
1338}
1339
1340static void radeon_output_poll_changed(struct drm_device *dev)
1341{
1342 struct radeon_device *rdev = dev->dev_private;
1343 radeon_fb_output_poll_changed(rdev);
1344}
1345
1346static const struct drm_mode_config_funcs radeon_mode_funcs = {
1347 .fb_create = radeon_user_framebuffer_create,
1348 .output_poll_changed = radeon_output_poll_changed
1349};
1350
1351static struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
1352{ { 0, "driver" },
1353 { 1, "bios" },
1354};
1355
1356static struct drm_prop_enum_list radeon_tv_std_enum_list[] =
1357{ { TV_STD_NTSC, "ntsc" },
1358 { TV_STD_PAL, "pal" },
1359 { TV_STD_PAL_M, "pal-m" },
1360 { TV_STD_PAL_60, "pal-60" },
1361 { TV_STD_NTSC_J, "ntsc-j" },
1362 { TV_STD_SCART_PAL, "scart-pal" },
1363 { TV_STD_PAL_CN, "pal-cn" },
1364 { TV_STD_SECAM, "secam" },
1365};
1366
1367static struct drm_prop_enum_list radeon_underscan_enum_list[] =
1368{ { UNDERSCAN_OFF, "off" },
1369 { UNDERSCAN_ON, "on" },
1370 { UNDERSCAN_AUTO, "auto" },
1371};
1372
1373static struct drm_prop_enum_list radeon_audio_enum_list[] =
1374{ { RADEON_AUDIO_DISABLE, "off" },
1375 { RADEON_AUDIO_ENABLE, "on" },
1376 { RADEON_AUDIO_AUTO, "auto" },
1377};
1378
1379
1380static struct drm_prop_enum_list radeon_dither_enum_list[] =
1381{ { RADEON_FMT_DITHER_DISABLE, "off" },
1382 { RADEON_FMT_DITHER_ENABLE, "on" },
1383};
1384
1385static struct drm_prop_enum_list radeon_output_csc_enum_list[] =
1386{ { RADEON_OUTPUT_CSC_BYPASS, "bypass" },
1387 { RADEON_OUTPUT_CSC_TVRGB, "tvrgb" },
1388 { RADEON_OUTPUT_CSC_YCBCR601, "ycbcr601" },
1389 { RADEON_OUTPUT_CSC_YCBCR709, "ycbcr709" },
1390};
1391
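/* Create the driver specific KMS properties (coherent mode, tmds_pll,
 * load detection, tv standard, underscan, audio, dither, output_csc).
 */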
1392static int radeon_modeset_create_props(struct radeon_device *rdev)
1393{
1394 int sz;
1395
1396 if (rdev->is_atom_bios) {
1397 rdev->mode_info.coherent_mode_property =
1398 drm_property_create_range(rdev->ddev, 0 , "coherent", 0, 1);
1399 if (!rdev->mode_info.coherent_mode_property)
1400 return -ENOMEM;
1401 }
1402
1403 if (!ASIC_IS_AVIVO(rdev)) {
1404 sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
1405 rdev->mode_info.tmds_pll_property =
1406 drm_property_create_enum(rdev->ddev, 0,
1407 "tmds_pll",
1408 radeon_tmds_pll_enum_list, sz);
1409 }
1410
1411 rdev->mode_info.load_detect_property =
1412 drm_property_create_range(rdev->ddev, 0, "load detection", 0, 1);
1413 if (!rdev->mode_info.load_detect_property)
1414 return -ENOMEM;
1415
1416 drm_mode_create_scaling_mode_property(rdev->ddev);
1417
1418 sz = ARRAY_SIZE(radeon_tv_std_enum_list);
1419 rdev->mode_info.tv_std_property =
1420 drm_property_create_enum(rdev->ddev, 0,
1421 "tv standard",
1422 radeon_tv_std_enum_list, sz);
1423
1424 sz = ARRAY_SIZE(radeon_underscan_enum_list);
1425 rdev->mode_info.underscan_property =
1426 drm_property_create_enum(rdev->ddev, 0,
1427 "underscan",
1428 radeon_underscan_enum_list, sz);
1429
1430 rdev->mode_info.underscan_hborder_property =
1431 drm_property_create_range(rdev->ddev, 0,
1432 "underscan hborder", 0, 128);
1433 if (!rdev->mode_info.underscan_hborder_property)
1434 return -ENOMEM;
1435
1436 rdev->mode_info.underscan_vborder_property =
1437 drm_property_create_range(rdev->ddev, 0,
1438 "underscan vborder", 0, 128);
1439 if (!rdev->mode_info.underscan_vborder_property)
1440 return -ENOMEM;
1441
1442 sz = ARRAY_SIZE(radeon_audio_enum_list);
1443 rdev->mode_info.audio_property =
1444 drm_property_create_enum(rdev->ddev, 0,
1445 "audio",
1446 radeon_audio_enum_list, sz);
1447
1448 sz = ARRAY_SIZE(radeon_dither_enum_list);
1449 rdev->mode_info.dither_property =
1450 drm_property_create_enum(rdev->ddev, 0,
1451 "dither",
1452 radeon_dither_enum_list, sz);
1453
1454 sz = ARRAY_SIZE(radeon_output_csc_enum_list);
1455 rdev->mode_info.output_csc_property =
1456 drm_property_create_enum(rdev->ddev, 0,
1457 "output_csc",
1458 radeon_output_csc_enum_list, sz);
1459
1460 return 0;
1461}
1462
1463void radeon_update_display_priority(struct radeon_device *rdev)
1464{
	/* pick a display priority: honour the module parameter if it is 1 or 2,
	 * otherwise choose automatically */
1466 if ((radeon_disp_priority == 0) || (radeon_disp_priority > 2)) {
		/* Default to high display priority on r3xx/rv515 discrete
		 * parts: this avoids display underflow (flicker) when the
		 * 3D engine generates heavy memory traffic. IGP and other
		 * families keep the normal priority.
		 */
1473 if ((ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515)) &&
1474 !(rdev->flags & RADEON_IS_IGP))
1475 rdev->disp_priority = 2;
1476 else
1477 rdev->disp_priority = 0;
1478 } else
1479 rdev->disp_priority = radeon_disp_priority;
1480
1481}
1482
/*
 * Allocate hdmi structs and determine register offsets
 */
1486static void radeon_afmt_init(struct radeon_device *rdev)
1487{
1488 int i;
1489
1490 for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++)
1491 rdev->mode_info.afmt[i] = NULL;
1492
1493 if (ASIC_IS_NODCE(rdev)) {
1494
1495 } else if (ASIC_IS_DCE4(rdev)) {
1496 static uint32_t eg_offsets[] = {
1497 EVERGREEN_CRTC0_REGISTER_OFFSET,
1498 EVERGREEN_CRTC1_REGISTER_OFFSET,
1499 EVERGREEN_CRTC2_REGISTER_OFFSET,
1500 EVERGREEN_CRTC3_REGISTER_OFFSET,
1501 EVERGREEN_CRTC4_REGISTER_OFFSET,
1502 EVERGREEN_CRTC5_REGISTER_OFFSET,
1503 0x13830 - 0x7030,
1504 };
1505 int num_afmt;

		/* DCE8 has 7 audio blocks tied to DIG encoders */
		/* DCE6 has 6 audio blocks tied to DIG encoders */
		/* DCE4/5 has 6 audio blocks tied to DIG encoders */
		/* DCE4.1 has 2 audio blocks tied to DIG encoders */
1511 if (ASIC_IS_DCE8(rdev))
1512 num_afmt = 7;
1513 else if (ASIC_IS_DCE6(rdev))
1514 num_afmt = 6;
1515 else if (ASIC_IS_DCE5(rdev))
1516 num_afmt = 6;
1517 else if (ASIC_IS_DCE41(rdev))
1518 num_afmt = 2;
1519 else
1520 num_afmt = 6;
1521
1522 BUG_ON(num_afmt > ARRAY_SIZE(eg_offsets));
1523 for (i = 0; i < num_afmt; i++) {
1524 rdev->mode_info.afmt[i] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1525 if (rdev->mode_info.afmt[i]) {
1526 rdev->mode_info.afmt[i]->offset = eg_offsets[i];
1527 rdev->mode_info.afmt[i]->id = i;
1528 }
1529 }
1530 } else if (ASIC_IS_DCE3(rdev)) {
1531
1532 rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1533 if (rdev->mode_info.afmt[0]) {
1534 rdev->mode_info.afmt[0]->offset = DCE3_HDMI_OFFSET0;
1535 rdev->mode_info.afmt[0]->id = 0;
1536 }
1537 rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1538 if (rdev->mode_info.afmt[1]) {
1539 rdev->mode_info.afmt[1]->offset = DCE3_HDMI_OFFSET1;
1540 rdev->mode_info.afmt[1]->id = 1;
1541 }
1542 } else if (ASIC_IS_DCE2(rdev)) {
1543
1544 rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1545 if (rdev->mode_info.afmt[0]) {
1546 rdev->mode_info.afmt[0]->offset = DCE2_HDMI_OFFSET0;
1547 rdev->mode_info.afmt[0]->id = 0;
1548 }
1549
1550 if (rdev->family >= CHIP_R600) {
1551 rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1552 if (rdev->mode_info.afmt[1]) {
1553 rdev->mode_info.afmt[1]->offset = DCE2_HDMI_OFFSET1;
1554 rdev->mode_info.afmt[1]->id = 1;
1555 }
1556 }
1557 }
1558}
1559
1560static void radeon_afmt_fini(struct radeon_device *rdev)
1561{
1562 int i;
1563
1564 for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++) {
1565 kfree(rdev->mode_info.afmt[i]);
1566 rdev->mode_info.afmt[i] = NULL;
1567 }
1568}
1569
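/* One-time KMS setup: mode config limits, driver properties, i2c buses,
 * crtcs, encoders/connectors from the BIOS tables, hpd, audio (afmt),
 * fbdev emulation and output polling.
 */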
1570int radeon_modeset_init(struct radeon_device *rdev)
1571{
1572 int i;
1573 int ret;
1574
1575 drm_mode_config_init(rdev->ddev);
1576 rdev->mode_info.mode_config_initialized = true;
1577
1578 rdev->ddev->mode_config.funcs = &radeon_mode_funcs;
1579
1580 if (ASIC_IS_DCE5(rdev)) {
1581 rdev->ddev->mode_config.max_width = 16384;
1582 rdev->ddev->mode_config.max_height = 16384;
1583 } else if (ASIC_IS_AVIVO(rdev)) {
1584 rdev->ddev->mode_config.max_width = 8192;
1585 rdev->ddev->mode_config.max_height = 8192;
1586 } else {
1587 rdev->ddev->mode_config.max_width = 4096;
1588 rdev->ddev->mode_config.max_height = 4096;
1589 }
1590
1591 rdev->ddev->mode_config.preferred_depth = 24;
1592 rdev->ddev->mode_config.prefer_shadow = 1;
1593
1594 rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;
1595
1596 ret = radeon_modeset_create_props(rdev);
1597 if (ret) {
1598 return ret;
1599 }
1600
1601
1602 radeon_i2c_init(rdev);
1603
1604
1605 if (!rdev->is_atom_bios) {
1606
1607 radeon_combios_check_hardcoded_edid(rdev);
1608 }
1609
1610
1611 for (i = 0; i < rdev->num_crtc; i++) {
1612 radeon_crtc_init(rdev->ddev, i);
1613 }
1614
1615
1616 ret = radeon_setup_enc_conn(rdev->ddev);
1617 if (!ret) {
1618 return ret;
1619 }
1620
1621
1622 if (rdev->is_atom_bios) {
1623 radeon_atom_encoder_init(rdev);
1624 radeon_atom_disp_eng_pll_init(rdev);
1625 }
1626
1627
1628 radeon_hpd_init(rdev);
1629
1630
1631 radeon_afmt_init(rdev);
1632
1633 radeon_fbdev_init(rdev);
1634 drm_kms_helper_poll_init(rdev->ddev);
1635
1636 if (rdev->pm.dpm_enabled) {
1637
1638 ret = radeon_pm_late_init(rdev);
1639 if (ret) {
1640 rdev->pm.dpm_enabled = false;
1641 DRM_ERROR("radeon_pm_late_init failed, disabling dpm\n");
1642 }
1643
1644
1645
1646 radeon_pm_compute_clocks(rdev);
1647 }
1648
1649 return 0;
1650}
1651
1652void radeon_modeset_fini(struct radeon_device *rdev)
1653{
1654 radeon_fbdev_fini(rdev);
1655 kfree(rdev->mode_info.bios_hardcoded_edid);
1656
1657 if (rdev->mode_info.mode_config_initialized) {
1658 radeon_afmt_fini(rdev);
1659 drm_kms_helper_poll_fini(rdev->ddev);
1660 radeon_hpd_fini(rdev);
1661 drm_mode_config_cleanup(rdev->ddev);
1662 rdev->mode_info.mode_config_initialized = false;
1663 }
1664
1665 radeon_i2c_fini(rdev);
1666}
1667
1668static bool is_hdtv_mode(const struct drm_display_mode *mode)
1669{
	/* try and guess if this is a tv or a monitor */
1671 if ((mode->vdisplay == 480 && mode->hdisplay == 720) ||
1672 (mode->vdisplay == 576) ||
1673 (mode->vdisplay == 720) ||
1674 (mode->vdisplay == 1080))
1675 return true;
1676 else
1677 return false;
1678}
1679
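/* Decide whether the crtc needs the scaler (RMX) enabled for the mode,
 * derive the h/v scaling ratios and apply automatic underscan borders
 * for HDMI displays where requested.
 */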
1680bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
1681 const struct drm_display_mode *mode,
1682 struct drm_display_mode *adjusted_mode)
1683{
1684 struct drm_device *dev = crtc->dev;
1685 struct radeon_device *rdev = dev->dev_private;
1686 struct drm_encoder *encoder;
1687 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1688 struct radeon_encoder *radeon_encoder;
1689 struct drm_connector *connector;
1690 struct radeon_connector *radeon_connector;
1691 bool first = true;
1692 u32 src_v = 1, dst_v = 1;
1693 u32 src_h = 1, dst_h = 1;
1694
1695 radeon_crtc->h_border = 0;
1696 radeon_crtc->v_border = 0;
1697
1698 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1699 if (encoder->crtc != crtc)
1700 continue;
1701 radeon_encoder = to_radeon_encoder(encoder);
1702 connector = radeon_get_connector_for_encoder(encoder);
1703 radeon_connector = to_radeon_connector(connector);
1704
1705 if (first) {
1706
1707 if (radeon_encoder->rmx_type == RMX_OFF)
1708 radeon_crtc->rmx_type = RMX_OFF;
1709 else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
1710 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
1711 radeon_crtc->rmx_type = radeon_encoder->rmx_type;
1712 else
1713 radeon_crtc->rmx_type = RMX_OFF;
1714
1715 memcpy(&radeon_crtc->native_mode,
1716 &radeon_encoder->native_mode,
1717 sizeof(struct drm_display_mode));
1718 src_v = crtc->mode.vdisplay;
1719 dst_v = radeon_crtc->native_mode.vdisplay;
1720 src_h = crtc->mode.hdisplay;
1721 dst_h = radeon_crtc->native_mode.hdisplay;
1722
1723
1724 if (ASIC_IS_AVIVO(rdev) &&
1725 (!(mode->flags & DRM_MODE_FLAG_INTERLACE)) &&
1726 ((radeon_encoder->underscan_type == UNDERSCAN_ON) ||
1727 ((radeon_encoder->underscan_type == UNDERSCAN_AUTO) &&
1728 drm_detect_hdmi_monitor(radeon_connector_edid(connector)) &&
1729 is_hdtv_mode(mode)))) {
1730 if (radeon_encoder->underscan_hborder != 0)
1731 radeon_crtc->h_border = radeon_encoder->underscan_hborder;
1732 else
1733 radeon_crtc->h_border = (mode->hdisplay >> 5) + 16;
1734 if (radeon_encoder->underscan_vborder != 0)
1735 radeon_crtc->v_border = radeon_encoder->underscan_vborder;
1736 else
1737 radeon_crtc->v_border = (mode->vdisplay >> 5) + 16;
1738 radeon_crtc->rmx_type = RMX_FULL;
1739 src_v = crtc->mode.vdisplay;
1740 dst_v = crtc->mode.vdisplay - (radeon_crtc->v_border * 2);
1741 src_h = crtc->mode.hdisplay;
1742 dst_h = crtc->mode.hdisplay - (radeon_crtc->h_border * 2);
1743 }
1744 first = false;
1745 } else {
1746 if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
				/* the rmx type must be consistent across all
				 * encoders driving this crtc; bail out if two
				 * encoders request different scaling modes.
				 */
1753 DRM_ERROR("Scaling not consistent across encoder.\n");
1754 return false;
1755 }
1756 }
1757 }
1758 if (radeon_crtc->rmx_type != RMX_OFF) {
1759 fixed20_12 a, b;
1760 a.full = dfixed_const(src_v);
1761 b.full = dfixed_const(dst_v);
1762 radeon_crtc->vsc.full = dfixed_div(a, b);
1763 a.full = dfixed_const(src_h);
1764 b.full = dfixed_const(dst_h);
1765 radeon_crtc->hsc.full = dfixed_div(a, b);
1766 } else {
1767 radeon_crtc->vsc.full = dfixed_const(1);
1768 radeon_crtc->hsc.full = dfixed_const(1);
1769 }
1770 return true;
1771}

/*
 * Retrieve current video scanout position of crtc on a given gpu, and
 * an optional accurate timestamp of when the query happened.
 *
 * @dev: device to query
 * @crtc: crtc to query
 * @flags: DRM_CALLED_FROM_VBLIRQ or 0
 * @vpos: location for the vertical scanout position
 * @hpos: location for the horizontal scanout position
 * @stime: timestamp taken immediately before the position query (may be NULL)
 * @etime: timestamp taken immediately after the position query (may be NULL)
 *
 * Returns vpos as a positive number while in active scanout area.
 * Returns vpos as a negative number inside vblank, counting the number
 * of scanlines to go until end of vblank, e.g., -1 means "one scanline
 * until start of active scanout / end of vblank."
 *
 * Returns flags, or'ed together:
 * DRM_SCANOUTPOS_VALID = query successful
 * DRM_SCANOUTPOS_IN_VBLANK = inside vblank
 * DRM_SCANOUTPOS_ACCURATE = returned position is accurate; without this flag
 * the position may be offset by a constant but unknown number of scanlines.
 */
1801int radeon_get_crtc_scanoutpos(struct drm_device *dev, int crtc, unsigned int flags,
1802 int *vpos, int *hpos, ktime_t *stime, ktime_t *etime)
1803{
1804 u32 stat_crtc = 0, vbl = 0, position = 0;
1805 int vbl_start, vbl_end, vtotal, ret = 0;
1806 bool in_vbl = true;
1807
1808 struct radeon_device *rdev = dev->dev_private;
1809
1810
1811
1812
1813 if (stime)
1814 *stime = ktime_get();
1815
1816 if (ASIC_IS_DCE4(rdev)) {
1817 if (crtc == 0) {
1818 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1819 EVERGREEN_CRTC0_REGISTER_OFFSET);
1820 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1821 EVERGREEN_CRTC0_REGISTER_OFFSET);
1822 ret |= DRM_SCANOUTPOS_VALID;
1823 }
1824 if (crtc == 1) {
1825 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1826 EVERGREEN_CRTC1_REGISTER_OFFSET);
1827 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1828 EVERGREEN_CRTC1_REGISTER_OFFSET);
1829 ret |= DRM_SCANOUTPOS_VALID;
1830 }
1831 if (crtc == 2) {
1832 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1833 EVERGREEN_CRTC2_REGISTER_OFFSET);
1834 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1835 EVERGREEN_CRTC2_REGISTER_OFFSET);
1836 ret |= DRM_SCANOUTPOS_VALID;
1837 }
1838 if (crtc == 3) {
1839 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1840 EVERGREEN_CRTC3_REGISTER_OFFSET);
1841 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1842 EVERGREEN_CRTC3_REGISTER_OFFSET);
1843 ret |= DRM_SCANOUTPOS_VALID;
1844 }
1845 if (crtc == 4) {
1846 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1847 EVERGREEN_CRTC4_REGISTER_OFFSET);
1848 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1849 EVERGREEN_CRTC4_REGISTER_OFFSET);
1850 ret |= DRM_SCANOUTPOS_VALID;
1851 }
1852 if (crtc == 5) {
1853 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1854 EVERGREEN_CRTC5_REGISTER_OFFSET);
1855 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1856 EVERGREEN_CRTC5_REGISTER_OFFSET);
1857 ret |= DRM_SCANOUTPOS_VALID;
1858 }
1859 } else if (ASIC_IS_AVIVO(rdev)) {
1860 if (crtc == 0) {
1861 vbl = RREG32(AVIVO_D1CRTC_V_BLANK_START_END);
1862 position = RREG32(AVIVO_D1CRTC_STATUS_POSITION);
1863 ret |= DRM_SCANOUTPOS_VALID;
1864 }
1865 if (crtc == 1) {
1866 vbl = RREG32(AVIVO_D2CRTC_V_BLANK_START_END);
1867 position = RREG32(AVIVO_D2CRTC_STATUS_POSITION);
1868 ret |= DRM_SCANOUTPOS_VALID;
1869 }
1870 } else {
1871
1872 if (crtc == 0) {
1873
1874
1875
1876 vbl = (RREG32(RADEON_CRTC_V_TOTAL_DISP) &
1877 RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
1878
1879 position = (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
1880 stat_crtc = RREG32(RADEON_CRTC_STATUS);
1881 if (!(stat_crtc & 1))
1882 in_vbl = false;
1883
1884 ret |= DRM_SCANOUTPOS_VALID;
1885 }
1886 if (crtc == 1) {
1887 vbl = (RREG32(RADEON_CRTC2_V_TOTAL_DISP) &
1888 RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
1889 position = (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
1890 stat_crtc = RREG32(RADEON_CRTC2_STATUS);
1891 if (!(stat_crtc & 1))
1892 in_vbl = false;
1893
1894 ret |= DRM_SCANOUTPOS_VALID;
1895 }
1896 }
1897
1898
1899 if (etime)
1900 *etime = ktime_get();
1901
1902
1903
1904
1905 *vpos = position & 0x1fff;
1906 *hpos = (position >> 16) & 0x1fff;
1907
1908
1909 if (vbl > 0) {
1910
1911 ret |= DRM_SCANOUTPOS_ACCURATE;
1912 vbl_start = vbl & 0x1fff;
1913 vbl_end = (vbl >> 16) & 0x1fff;
1914 }
1915 else {
1916
1917 vbl_start = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vdisplay;
1918 vbl_end = 0;
1919 }
1920
1921
1922 if ((*vpos < vbl_start) && (*vpos >= vbl_end))
1923 in_vbl = false;
1924
1925
1926
1927
1928
1929
1930
1931
1932 if (in_vbl && (*vpos >= vbl_start)) {
1933 vtotal = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vtotal;
1934 *vpos = *vpos - vtotal;
1935 }
1936
1937
1938 *vpos = *vpos - vbl_end;
1939
1940
1941 if (in_vbl)
1942 ret |= DRM_SCANOUTPOS_IN_VBLANK;
1943
1944
1945
1946
1947
1948
1949
1950
1951
1952
1953
1954 if ((flags & DRM_CALLED_FROM_VBLIRQ) && !in_vbl) {
1955 vbl_start = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vdisplay;
1956 vtotal = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vtotal;
1957
1958 if (vbl_start - *vpos < vtotal / 100) {
1959 *vpos -= vtotal;
1960
1961
1962 ret |= 0x8;
1963 }
1964 }
1965
1966 return ret;
1967}
1968