1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26#include <drm/drmP.h>
27#include <drm/radeon_drm.h>
28#include "radeon.h"
29
30#include "atom.h"
31#include <asm/div64.h>
32
33#include <linux/pm_runtime.h>
34#include <drm/drm_crtc_helper.h>
35#include <drm/drm_fb_helper.h>
36#include <drm/drm_plane_helper.h>
37#include <drm/drm_edid.h>
38
39#include <linux/gcd.h>
40
/*
 * Program the 256-entry gamma LUT for an AVIVO display controller.
 *
 * The gamma values are read from crtc->gamma_store (three consecutive
 * arrays of gamma_size u16 values: red, green, blue) and packed into
 * the hardware's 10:10:10 LUT format.
 */
static void avivo_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u16 *r, *g, *b;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);

	/* full-range LUT: black point at 0 ... */
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	/* ... white point at full scale for every channel */
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	/* select this CRTC's LUT for register access and enable writes */
	WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUT_RW_MODE, 0);
	WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);

	/* the index auto-increments after each data write; start at entry 0 */
	WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;
	for (i = 0; i < 256; i++) {
		/* keep the top 10 bits of each 16-bit channel value */
		WREG32(AVIVO_DC_LUT_30_COLOR,
		       ((*r++ & 0xffc0) << 14) |
		       ((*g++ & 0xffc0) << 4) |
		       (*b++ >> 6));
	}

	/* point the graphics pipe at the LUT that was just loaded */
	WREG32_P(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id, ~1);
}
78
/*
 * Program the 256-entry gamma LUT for a DCE4 (Evergreen) display
 * controller.  Same LUT packing as the AVIVO path, but all registers
 * are per-CRTC (offset by crtc_offset) rather than globally selected.
 */
static void dce4_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u16 *r, *g, *b;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	/* full-range LUT: black point at 0, white point at full scale */
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	/* enable writes to all three channels */
	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	/* auto-incrementing index; start at entry 0 */
	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;
	for (i = 0; i < 256; i++) {
		/* pack the top 10 bits of each channel into 10:10:10 */
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       ((*r++ & 0xffc0) << 14) |
		       ((*g++ & 0xffc0) << 4) |
		       (*b++ >> 6));
	}
}
112
/*
 * Program the gamma LUT for a DCE5+ (NI and newer) display controller.
 *
 * In addition to loading the Evergreen-style LUT, this bypasses the
 * extra color pipeline stages introduced on NI (input CSC, prescale,
 * degamma, gamut remap, regamma) so that only the legacy LUT affects
 * the output, and applies the per-CRTC output CSC mode.
 */
static void dce5_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u16 *r, *g, *b;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);

	/* bypass the input color processing stages */
	WREG32(NI_INPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS) |
		NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS)));
	WREG32(NI_PRESCALE_GRPH_CONTROL + radeon_crtc->crtc_offset,
	       NI_GRPH_PRESCALE_BYPASS);
	WREG32(NI_PRESCALE_OVL_CONTROL + radeon_crtc->crtc_offset,
	       NI_OVL_PRESCALE_BYPASS);
	/* route graphics and overlay through the legacy input gamma LUT */
	WREG32(NI_INPUT_GAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT) |
		NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT)));

	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	/* full-range LUT: black point at 0, white point at full scale */
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	/* auto-incrementing index; start at entry 0 */
	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;
	for (i = 0; i < 256; i++) {
		/* pack the top 10 bits of each channel into 10:10:10 */
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       ((*r++ & 0xffc0) << 14) |
		       ((*g++ & 0xffc0) << 4) |
		       (*b++ >> 6));
	}

	/* bypass every post-LUT color stage */
	WREG32(NI_DEGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS)));
	WREG32(NI_GAMUT_REMAP_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS) |
		NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS)));
	WREG32(NI_REGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS) |
		NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS)));
	/* output CSC mode comes from per-CRTC state (e.g. for YCbCr output) */
	WREG32(NI_OUTPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_OUTPUT_CSC_GRPH_MODE(radeon_crtc->output_csc) |
		NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS)));

	/* NOTE(review): unnamed register, cleared here on every LUT load;
	 * exact purpose is not documented in this file */
	WREG32(0x6940 + radeon_crtc->crtc_offset, 0);
	if (ASIC_IS_DCE8(rdev)) {
		/* DCE8 additionally needs cursor alpha blending enabled */
		WREG32(CIK_ALPHA_CONTROL + radeon_crtc->crtc_offset,
		       CIK_CURSOR_ALPHA_BLND_ENA);
	}
}
182
/*
 * Program the palette/gamma LUT on pre-AVIVO (legacy) hardware.
 * Both CRTCs share one palette port; DAC_CNTL2 selects which CRTC's
 * palette the index/data registers access.
 */
static void legacy_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u16 *r, *g, *b;
	int i;
	uint32_t dac2_cntl;

	/* route palette accesses to this CRTC's palette */
	dac2_cntl = RREG32(RADEON_DAC_CNTL2);
	if (radeon_crtc->crtc_id == 0)
		dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
	else
		dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
	WREG32(RADEON_DAC_CNTL2, dac2_cntl);

	/* auto-incrementing palette index; start at entry 0 */
	WREG8(RADEON_PALETTE_INDEX, 0);
	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;
	for (i = 0; i < 256; i++) {
		/* pack the top 10 bits of each channel into 10:10:10 */
		WREG32(RADEON_PALETTE_30_DATA,
		       ((*r++ & 0xffc0) << 14) |
		       ((*g++ & 0xffc0) << 4) |
		       (*b++ >> 6));
	}
}
210
/*
 * Load the gamma LUT for a CRTC, dispatching to the implementation
 * that matches the display block generation.
 *
 * The checks must stay in this order: DCE5 hardware also satisfies the
 * DCE4 test, and DCE4 also satisfies the AVIVO test, so the most
 * specific generation has to be checked first.
 */
void radeon_crtc_load_lut(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	/* nothing to program while the CRTC is off */
	if (!crtc->enabled)
		return;

	if (ASIC_IS_DCE5(rdev))
		dce5_crtc_load_lut(crtc);
	else if (ASIC_IS_DCE4(rdev))
		dce4_crtc_load_lut(crtc);
	else if (ASIC_IS_AVIVO(rdev))
		avivo_crtc_load_lut(crtc);
	else
		legacy_crtc_load_lut(crtc);
}
228
/*
 * drm_crtc_funcs.gamma_set callback.  By the time this is called the
 * DRM core has already stored the new red/green/blue ramps in
 * crtc->gamma_store, which is what radeon_crtc_load_lut() reads, so
 * all that remains is to push them into the hardware.  Always succeeds.
 */
static int radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
				 u16 *blue, uint32_t size,
				 struct drm_modeset_acquire_ctx *ctx)
{
	radeon_crtc_load_lut(crtc);

	return 0;
}
237
/*
 * drm_crtc_funcs.destroy callback: unregister the CRTC from the DRM
 * core, tear down its page-flip workqueue and free the driver state
 * allocated in radeon_crtc_init().
 */
static void radeon_crtc_destroy(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	drm_crtc_cleanup(crtc);
	destroy_workqueue(radeon_crtc->flip_queue);
	kfree(radeon_crtc);
}
246
247
248
249
250
251
252
253
/*
 * Deferred cleanup after a completed page flip: unpin the buffer that
 * was previously being scanned out and drop the GEM reference taken
 * when the flip was queued.  Runs on the per-CRTC flip workqueue.
 */
static void radeon_unpin_work_func(struct work_struct *__work)
{
	struct radeon_flip_work *work =
		container_of(__work, struct radeon_flip_work, unpin_work);
	int r;

	/* unpin of the old buffer */
	r = radeon_bo_reserve(work->old_rbo, false);
	if (likely(r == 0)) {
		r = radeon_bo_unpin(work->old_rbo);
		if (unlikely(r != 0)) {
			DRM_ERROR("failed to unpin buffer after flip\n");
		}
		radeon_bo_unreserve(work->old_rbo);
	} else
		/* best effort: the pin is leaked but we still release the ref */
		DRM_ERROR("failed to reserve buffer after flip\n");

	drm_gem_object_put_unlocked(&work->old_rbo->gem_base);
	kfree(work);
}
274
/*
 * Vblank interrupt handler for a CRTC: polling-based detection of
 * page-flip completion.  If a flip was submitted and the hardware no
 * longer reports it pending (or we can prove it completed from the
 * scanout position), finish it via radeon_crtc_handle_flip().
 */
void radeon_crtc_handle_vblank(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	unsigned long flags;
	u32 update_pending;
	int vpos, hpos;

	/* can happen during initialization */
	if (radeon_crtc == NULL)
		return;

	/*
	 * With radeon_use_pflipirq == 2 on DCE4+, completion is handled
	 * exclusively by the dedicated pflip interrupt, so skip this
	 * polling-based path entirely.
	 */
	if ((radeon_use_pflipirq == 2) && ASIC_IS_DCE4(rdev))
		return;

	/* flip_status is protected by the event_lock */
	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
	if (radeon_crtc->flip_status != RADEON_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("radeon_crtc->flip_status = %d != "
				 "RADEON_FLIP_SUBMITTED(%d)\n",
				 radeon_crtc->flip_status,
				 RADEON_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}

	/* ask the hardware whether the flip has latched yet */
	update_pending = radeon_page_flip_pending(rdev, crtc_id);

	/*
	 * The pending bit can still be set if this vblank irq fires just
	 * inside vblank, before the new base address has latched.  If the
	 * scanout position says we are already past the point where the
	 * flip must have completed, treat it as done anyway.  On
	 * non-AVIVO parts a non-negative hpos alone is taken as proof.
	 * NOTE(review): relies on GET_DISTANCE_TO_VBLANKSTART semantics
	 * of radeon_get_crtc_scanoutpos() — vpos/hpos are distances to
	 * vblank start, negative while inside vblank.
	 */
	if (update_pending &&
	    (DRM_SCANOUTPOS_VALID &
	     radeon_get_crtc_scanoutpos(rdev->ddev, crtc_id,
					GET_DISTANCE_TO_VBLANKSTART,
					&vpos, &hpos, NULL, NULL,
					&rdev->mode_info.crtcs[crtc_id]->base.hwmode)) &&
	    ((vpos >= 0 && hpos < 0) || (hpos >= 0 && !ASIC_IS_AVIVO(rdev)))) {
		/* flip is certain to have completed; ignore the stale bit */
		update_pending = 0;
	}
	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
	if (!update_pending)
		radeon_crtc_handle_flip(rdev, crtc_id);
}
347
348
349
350
351
352
353
354
355
/*
 * Complete a submitted page flip on the given CRTC: send the vblank
 * event to userspace (if one was requested), release the vblank and
 * pflip-irq references taken when the flip was programmed, and queue
 * the deferred unpin of the old buffer.
 *
 * Called from interrupt context (vblank or pflip irq).
 */
void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	struct radeon_flip_work *work;
	unsigned long flags;

	/* can happen during initialization */
	if (radeon_crtc == NULL)
		return;

	/* flip_status and flip_work are protected by the event_lock */
	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
	work = radeon_crtc->flip_work;
	if (radeon_crtc->flip_status != RADEON_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("radeon_crtc->flip_status = %d != "
				 "RADEON_FLIP_SUBMITTED(%d)\n",
				 radeon_crtc->flip_status,
				 RADEON_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}

	/* the flip is done; allow a new one to be queued */
	radeon_crtc->flip_status = RADEON_FLIP_NONE;
	radeon_crtc->flip_work = NULL;

	/* wakeup userspace */
	if (work->event)
		drm_crtc_send_vblank_event(&radeon_crtc->base, work->event);

	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);

	drm_crtc_vblank_put(&radeon_crtc->base);
	radeon_irq_kms_pflip_irq_put(rdev, work->crtc_id);
	queue_work(radeon_crtc->flip_queue, &work->unpin_work);
}
391
392
393
394
395
396
397
398
/*
 * Worker that actually programs a queued page flip.
 *
 * Waits for the rendering fence on the new buffer (recovering from a
 * deadlocked fence with a GPU reset if needed), delays until the flip
 * can hit the intended target vblank, then writes the new scanout base
 * address to the hardware.  Runs on the per-CRTC flip workqueue.
 */
static void radeon_flip_work_func(struct work_struct *__work)
{
	struct radeon_flip_work *work =
		container_of(__work, struct radeon_flip_work, flip_work);
	struct radeon_device *rdev = work->rdev;
	struct drm_device *dev = rdev->ddev;
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[work->crtc_id];

	struct drm_crtc *crtc = &radeon_crtc->base;
	unsigned long flags;
	int r;
	int vpos, hpos;

	down_read(&rdev->exclusive_lock);
	if (work->fence) {
		struct radeon_fence *fence;

		fence = to_radeon_fence(work->fence);
		if (fence && fence->rdev == rdev) {
			/* our own fence: wait, and reset the GPU on deadlock */
			r = radeon_fence_wait(fence, false);
			if (r == -EDEADLK) {
				/* exclusive_lock must be dropped for the reset */
				up_read(&rdev->exclusive_lock);
				do {
					r = radeon_gpu_reset(rdev);
				} while (r == -EAGAIN);
				down_read(&rdev->exclusive_lock);
			}
		} else
			/* foreign fence (e.g. from another device) */
			r = dma_fence_wait(work->fence, false);

		if (r)
			DRM_ERROR("failed to wait on page flip fence (%d)!\n", r);

		/* the flip proceeds even if the wait failed; the fence
		 * reference is no longer needed either way */
		dma_fence_put(work->fence);
		work->fence = NULL;
	}

	/*
	 * Busy-wait (sleeping) until the flip can complete in the target
	 * vblank: spin while the CRTC is enabled, the scanout is inside
	 * vblank, and (on AVIVO+) the target vblank count has not been
	 * reached yet.
	 */
	while (radeon_crtc->enabled &&
	       (radeon_get_crtc_scanoutpos(dev, work->crtc_id, 0,
					   &vpos, &hpos, NULL, NULL,
					   &crtc->hwmode)
		& (DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_IN_VBLANK)) ==
	       (DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_IN_VBLANK) &&
	       (!ASIC_IS_AVIVO(rdev) ||
		((int) (work->target_vblank -
			dev->driver->get_vblank_counter(dev, work->crtc_id)) > 0)))
		usleep_range(1000, 2000);

	/* the event_lock protects flip_status against the irq handlers */
	spin_lock_irqsave(&crtc->dev->event_lock, flags);

	/* enable the pflip interrupt for completion detection */
	radeon_irq_kms_pflip_irq_get(rdev, radeon_crtc->crtc_id);

	/* do the flip (mmio) */
	radeon_page_flip(rdev, radeon_crtc->crtc_id, work->base, work->async);

	radeon_crtc->flip_status = RADEON_FLIP_SUBMITTED;
	spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
	up_read(&rdev->exclusive_lock);
}
470
/*
 * drm_crtc_funcs.page_flip_target implementation.
 *
 * Pins the new framebuffer into VRAM, computes the hardware scanout
 * base address (including tiling/pitch offsets on pre-AVIVO parts),
 * records the target vblank and queues radeon_flip_work_func() on the
 * per-CRTC flip workqueue to program the flip.  The old buffer is
 * referenced here and unpinned later by radeon_unpin_work_func().
 *
 * Returns 0 on success, -ENOMEM on allocation failure, -EBUSY if a
 * flip is already pending, or a negative error from buffer pinning.
 */
static int radeon_crtc_page_flip_target(struct drm_crtc *crtc,
					struct drm_framebuffer *fb,
					struct drm_pending_vblank_event *event,
					uint32_t page_flip_flags,
					uint32_t target,
					struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_framebuffer *old_radeon_fb;
	struct radeon_framebuffer *new_radeon_fb;
	struct drm_gem_object *obj;
	struct radeon_flip_work *work;
	struct radeon_bo *new_rbo;
	uint32_t tiling_flags, pitch_pixels;
	uint64_t base;
	unsigned long flags;
	int r;

	work = kzalloc(sizeof *work, GFP_KERNEL);
	if (work == NULL)
		return -ENOMEM;

	INIT_WORK(&work->flip_work, radeon_flip_work_func);
	INIT_WORK(&work->unpin_work, radeon_unpin_work_func);

	work->rdev = rdev;
	work->crtc_id = radeon_crtc->crtc_id;
	work->event = event;
	work->async = (page_flip_flags & DRM_MODE_PAGE_FLIP_ASYNC) != 0;

	/* schedule unpin of the old buffer */
	old_radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
	obj = old_radeon_fb->obj;

	/* take a reference to the old object; dropped in the unpin worker */
	drm_gem_object_get(obj);
	work->old_rbo = gem_to_radeon_bo(obj);

	new_radeon_fb = to_radeon_framebuffer(fb);
	obj = new_radeon_fb->obj;
	new_rbo = gem_to_radeon_bo(obj);

	DRM_DEBUG_DRIVER("flip-ioctl() cur_rbo = %p, new_rbo = %p\n",
			 work->old_rbo, new_rbo);

	/* pin the new buffer into VRAM so the CRTC can scan it out */
	r = radeon_bo_reserve(new_rbo, false);
	if (unlikely(r != 0)) {
		DRM_ERROR("failed to reserve new rbo buffer before flip\n");
		goto cleanup;
	}
	/* pre-AVIVO CRTCs can only address the low 27 bits */
	r = radeon_bo_pin_restricted(new_rbo, RADEON_GEM_DOMAIN_VRAM,
				     ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, &base);
	if (unlikely(r != 0)) {
		radeon_bo_unreserve(new_rbo);
		/* NOTE(review): the pin error code is replaced by -EINVAL here */
		r = -EINVAL;
		DRM_ERROR("failed to pin new rbo buffer before flip\n");
		goto cleanup;
	}
	/* wait for rendering to the new buffer before flipping to it */
	work->fence = dma_fence_get(reservation_object_get_excl(new_rbo->tbo.resv));
	radeon_bo_get_tiling_flags(new_rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(new_rbo);

	if (!ASIC_IS_AVIVO(rdev)) {
		/* crtc offset is from display base addr not FB location */
		base -= radeon_crtc->legacy_display_base_addr;
		pitch_pixels = fb->pitches[0] / fb->format->cpp[0];

		if (tiling_flags & RADEON_TILING_MACRO) {
			if (ASIC_IS_R300(rdev)) {
				/* R300+: just align to the tile boundary */
				base &= ~0x7ff;
			} else {
				/* older parts: compute the macro-tile offset
				 * of the (x, y) origin within the buffer */
				int byteshift = fb->format->cpp[0] * 8 >> 4;
				int tile_addr = (((crtc->y >> 3) * pitch_pixels + crtc->x) >> (8 - byteshift)) << 11;
				base += tile_addr + ((crtc->x << byteshift) % 256) + ((crtc->y % 8) << 8);
			}
		} else {
			/* linear: byte offset of (x, y) at this bpp */
			int offset = crtc->y * pitch_pixels + crtc->x;
			switch (fb->format->cpp[0] * 8) {
			case 8:
			default:
				offset *= 1;
				break;
			case 15:
			case 16:
				offset *= 2;
				break;
			case 24:
				offset *= 3;
				break;
			case 32:
				offset *= 4;
				break;
			}
			base += offset;
		}
		/* the scanout base must be 8-byte aligned */
		base &= ~7;
	}
	work->base = base;
	/* convert the absolute target into the driver's vblank counter space */
	work->target_vblank = target - drm_crtc_vblank_count(crtc) +
		dev->driver->get_vblank_counter(dev, work->crtc_id);

	/* We borrow the event spin lock for protecting flip_status */
	spin_lock_irqsave(&crtc->dev->event_lock, flags);

	if (radeon_crtc->flip_status != RADEON_FLIP_NONE) {
		DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
		spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
		r = -EBUSY;
		goto pflip_cleanup;
	}
	radeon_crtc->flip_status = RADEON_FLIP_PENDING;
	radeon_crtc->flip_work = work;

	/* update crtc fb */
	crtc->primary->fb = fb;

	spin_unlock_irqrestore(&crtc->dev->event_lock, flags);

	queue_work(radeon_crtc->flip_queue, &work->flip_work);
	return 0;

pflip_cleanup:
	/* undo the pin taken above */
	if (unlikely(radeon_bo_reserve(new_rbo, false) != 0)) {
		DRM_ERROR("failed to reserve new rbo in error path\n");
		goto cleanup;
	}
	if (unlikely(radeon_bo_unpin(new_rbo) != 0)) {
		DRM_ERROR("failed to unpin new rbo in error path\n");
	}
	radeon_bo_unreserve(new_rbo);

cleanup:
	/* drop the old-buffer reference and fence taken above */
	drm_gem_object_put_unlocked(&work->old_rbo->gem_base);
	dma_fence_put(work->fence);
	kfree(work);
	return r;
}
612
613static int
614radeon_crtc_set_config(struct drm_mode_set *set,
615 struct drm_modeset_acquire_ctx *ctx)
616{
617 struct drm_device *dev;
618 struct radeon_device *rdev;
619 struct drm_crtc *crtc;
620 bool active = false;
621 int ret;
622
623 if (!set || !set->crtc)
624 return -EINVAL;
625
626 dev = set->crtc->dev;
627
628 ret = pm_runtime_get_sync(dev->dev);
629 if (ret < 0)
630 return ret;
631
632 ret = drm_crtc_helper_set_config(set, ctx);
633
634 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head)
635 if (crtc->enabled)
636 active = true;
637
638 pm_runtime_mark_last_busy(dev->dev);
639
640 rdev = dev->dev_private;
641
642
643 if (active && !rdev->have_disp_power_ref) {
644 rdev->have_disp_power_ref = true;
645 return ret;
646 }
647
648
649 if (!active && rdev->have_disp_power_ref) {
650 pm_runtime_put_autosuspend(dev->dev);
651 rdev->have_disp_power_ref = false;
652 }
653
654
655 pm_runtime_put_autosuspend(dev->dev);
656 return ret;
657}
658
/* CRTC callbacks shared by all radeon display generations. */
static const struct drm_crtc_funcs radeon_crtc_funcs = {
	.cursor_set2 = radeon_crtc_cursor_set2,
	.cursor_move = radeon_crtc_cursor_move,
	.gamma_set = radeon_crtc_gamma_set,
	.set_config = radeon_crtc_set_config,
	.destroy = radeon_crtc_destroy,
	.page_flip_target = radeon_crtc_page_flip_target,
};
667
/*
 * Allocate and register CRTC number @index: create the DRM CRTC, its
 * gamma table and page-flip workqueue, set the cursor size limits for
 * the ASIC family, then hand off to the ATOM or legacy backend for
 * hardware-specific setup.  On allocation failure the CRTC is silently
 * skipped.
 */
static void radeon_crtc_init(struct drm_device *dev, int index)
{
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc;
	int i;

	/* trailing space holds the (currently unused) connector array */
	radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (radeon_crtc == NULL)
		return;

	drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
	radeon_crtc->crtc_id = index;
	/* dedicated high-priority queue for flip/unpin work */
	radeon_crtc->flip_queue = alloc_workqueue("radeon-crtc", WQ_HIGHPRI, 0);
	rdev->mode_info.crtcs[index] = radeon_crtc;

	if (rdev->family >= CHIP_BONAIRE) {
		radeon_crtc->max_cursor_width = CIK_CURSOR_WIDTH;
		radeon_crtc->max_cursor_height = CIK_CURSOR_HEIGHT;
	} else {
		radeon_crtc->max_cursor_width = CURSOR_WIDTH;
		radeon_crtc->max_cursor_height = CURSOR_HEIGHT;
	}
	dev->mode_config.cursor_width = radeon_crtc->max_cursor_width;
	dev->mode_config.cursor_height = radeon_crtc->max_cursor_height;

#if 0
	radeon_crtc->mode_set.crtc = &radeon_crtc->base;
	radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
	radeon_crtc->mode_set.num_connectors = 0;
#endif

	/* start with a linear (identity) gamma ramp */
	for (i = 0; i < 256; i++) {
		radeon_crtc->lut_r[i] = i << 2;
		radeon_crtc->lut_g[i] = i << 2;
		radeon_crtc->lut_b[i] = i << 2;
	}

	if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
		radeon_atombios_init_crtc(dev, radeon_crtc);
	else
		radeon_legacy_init_crtc(dev, radeon_crtc);
}
712
/* Human-readable names of the ATOM encoder objects, indexed by
 * radeon_encoder->encoder_id (for the display-setup printout below). */
static const char *encoder_names[38] = {
	"NONE",
	"INTERNAL_LVDS",
	"INTERNAL_TMDS1",
	"INTERNAL_TMDS2",
	"INTERNAL_DAC1",
	"INTERNAL_DAC2",
	"INTERNAL_SDVOA",
	"INTERNAL_SDVOB",
	"SI170B",
	"CH7303",
	"CH7301",
	"INTERNAL_DVO1",
	"EXTERNAL_SDVOA",
	"EXTERNAL_SDVOB",
	"TITFP513",
	"INTERNAL_LVTM1",
	"VT1623",
	"HDMI_SI1930",
	"HDMI_INTERNAL",
	"INTERNAL_KLDSCP_TMDS1",
	"INTERNAL_KLDSCP_DVO1",
	"INTERNAL_KLDSCP_DAC1",
	"INTERNAL_KLDSCP_DAC2",
	"SI178",
	"MVPU_FPGA",
	"INTERNAL_DDI",
	"VT1625",
	"HDMI_SI1932",
	"DP_AN9801",
	"DP_DP501",
	"INTERNAL_UNIPHY",
	"INTERNAL_KLDSCP_LVTMA",
	"INTERNAL_UNIPHY1",
	"INTERNAL_UNIPHY2",
	"NUTMEG",
	"TRAVIS",
	"INTERNAL_VCE",
	"INTERNAL_UNIPHY3",
};
753
/* Hotplug-detect pin names, indexed by radeon_connector->hpd.hpd. */
static const char *hpd_names[6] = {
	"HPD1",
	"HPD2",
	"HPD3",
	"HPD4",
	"HPD5",
	"HPD6",
};
762
/*
 * Log the detected display topology at boot: every connector, its HPD
 * pin, DDC bus registers and router configuration, plus each encoder
 * that can drive it and the devices they share.  Purely informational.
 */
static void radeon_print_display_setup(struct drm_device *dev)
{
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	struct drm_encoder *encoder;
	struct radeon_encoder *radeon_encoder;
	uint32_t devices;
	int i = 0;

	DRM_INFO("Radeon Display Connectors\n");
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		radeon_connector = to_radeon_connector(connector);
		DRM_INFO("Connector %d:\n", i);
		DRM_INFO("  %s\n", connector->name);
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			DRM_INFO("  %s\n", hpd_names[radeon_connector->hpd.hpd]);
		if (radeon_connector->ddc_bus) {
			DRM_INFO("  DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
				 radeon_connector->ddc_bus->rec.mask_clk_reg,
				 radeon_connector->ddc_bus->rec.mask_data_reg,
				 radeon_connector->ddc_bus->rec.a_clk_reg,
				 radeon_connector->ddc_bus->rec.a_data_reg,
				 radeon_connector->ddc_bus->rec.en_clk_reg,
				 radeon_connector->ddc_bus->rec.en_data_reg,
				 radeon_connector->ddc_bus->rec.y_clk_reg,
				 radeon_connector->ddc_bus->rec.y_data_reg);
			if (radeon_connector->router.ddc_valid)
				DRM_INFO("  DDC Router 0x%x/0x%x\n",
					 radeon_connector->router.ddc_mux_control_pin,
					 radeon_connector->router.ddc_mux_state);
			if (radeon_connector->router.cd_valid)
				DRM_INFO("  Clock/Data Router 0x%x/0x%x\n",
					 radeon_connector->router.cd_mux_control_pin,
					 radeon_connector->router.cd_mux_state);
		} else {
			/* digital/analog connectors should always have a DDC
			 * bus; a missing one usually means a broken BIOS table */
			if (connector->connector_type == DRM_MODE_CONNECTOR_VGA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVII ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVID ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVIA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)
				DRM_INFO("  DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
		}
		DRM_INFO("  Encoders:\n");
		list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
			radeon_encoder = to_radeon_encoder(encoder);
			/* only encoders sharing a device with this connector */
			devices = radeon_encoder->devices & radeon_connector->devices;
			if (devices) {
				if (devices & ATOM_DEVICE_CRT1_SUPPORT)
					DRM_INFO("    CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_CRT2_SUPPORT)
					DRM_INFO("    CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_LCD1_SUPPORT)
					DRM_INFO("    LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP1_SUPPORT)
					DRM_INFO("    DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP2_SUPPORT)
					DRM_INFO("    DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP3_SUPPORT)
					DRM_INFO("    DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP4_SUPPORT)
					DRM_INFO("    DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP5_SUPPORT)
					DRM_INFO("    DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP6_SUPPORT)
					DRM_INFO("    DFP6: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_TV1_SUPPORT)
					DRM_INFO("    TV1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_CV_SUPPORT)
					DRM_INFO("    CV: %s\n", encoder_names[radeon_encoder->encoder_id]);
			}
		}
		i++;
	}
}
838
839static bool radeon_setup_enc_conn(struct drm_device *dev)
840{
841 struct radeon_device *rdev = dev->dev_private;
842 bool ret = false;
843
844 if (rdev->bios) {
845 if (rdev->is_atom_bios) {
846 ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
847 if (ret == false)
848 ret = radeon_get_atom_connector_info_from_object_table(dev);
849 } else {
850 ret = radeon_get_legacy_connector_info_from_bios(dev);
851 if (ret == false)
852 ret = radeon_get_legacy_connector_info_from_table(dev);
853 }
854 } else {
855 if (!ASIC_IS_AVIVO(rdev))
856 ret = radeon_get_legacy_connector_info_from_table(dev);
857 }
858 if (ret) {
859 radeon_setup_encoder_clones(dev);
860 radeon_print_display_setup(dev);
861 }
862
863 return ret;
864}
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
/*
 * Reduce the clock ratio *nom / *den to lowest terms, then scale it
 * back up just enough that the numerator and denominator each meet
 * their hardware minimum (@nom_min / @den_min).  The value of the
 * ratio itself is preserved throughout.
 */
static void avivo_reduce_ratio(unsigned *nom, unsigned *den,
			       unsigned nom_min, unsigned den_min)
{
	unsigned scale;

	/* reduce the fraction to lowest terms */
	scale = gcd(*nom, *den);
	*nom /= scale;
	*den /= scale;

	/* scale both terms until the numerator reaches its minimum */
	if (*nom < nom_min) {
		scale = DIV_ROUND_UP(nom_min, *nom);
		*nom *= scale;
		*den *= scale;
	}

	/* scale both terms until the denominator reaches its minimum */
	if (*den < den_min) {
		scale = DIV_ROUND_UP(den_min, *den);
		*nom *= scale;
		*den *= scale;
	}
}
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
/*
 * Pick feedback and reference divider values that best approximate the
 * clock ratio nom/den for a given post divider, staying within the
 * hardware limits @fb_div_max and @ref_div_max.
 */
static void avivo_get_fb_ref_div(unsigned nom, unsigned den, unsigned post_div,
				 unsigned fb_div_max, unsigned ref_div_max,
				 unsigned *fb_div, unsigned *ref_div)
{
	/* limit reference * post divider to a maximum of 100 */
	ref_div_max = max(min(100 / post_div, ref_div_max), 1u);

	/* get matching reference and feedback divider */
	*ref_div = min(max(DIV_ROUND_CLOSEST(den, post_div), 1u), ref_div_max);
	*fb_div = DIV_ROUND_CLOSEST(nom * *ref_div * post_div, den);

	/* limit fb divider to its maximum, rescaling ref_div to keep
	 * the ratio as close as possible */
	if (*fb_div > fb_div_max) {
		*ref_div = DIV_ROUND_CLOSEST(*ref_div * fb_div_max, *fb_div);
		*fb_div = fb_div_max;
	}
}
936
937
938
939
940
941
942
943
944
945
946
947
948
949
/*
 * Compute PLL dividers for AVIVO+ display hardware.
 *
 * @pll: PLL constraints and flags
 * @freq: requested frequency (in units of 10 kHz, or kHz when
 *        RADEON_PLL_USE_FRAC_FB_DIV is set — see target_clock below)
 * @dot_clock_p: resulting pixel clock
 * @fb_div_p / @frac_fb_div_p: integer and fractional feedback divider
 * @ref_div_p: reference divider
 * @post_div_p: post divider
 *
 * Searches the legal post-divider range for the setting with the
 * smallest frequency deviation, then derives matching feedback and
 * reference dividers for it.
 */
void radeon_compute_pll_avivo(struct radeon_pll *pll,
			      u32 freq,
			      u32 *dot_clock_p,
			      u32 *fb_div_p,
			      u32 *frac_fb_div_p,
			      u32 *ref_div_p,
			      u32 *post_div_p)
{
	/* with a fractional fb divider everything is computed at 10x
	 * resolution, so the target keeps the extra decimal digit */
	unsigned target_clock = pll->flags & RADEON_PLL_USE_FRAC_FB_DIV ?
		freq : freq / 10;

	unsigned fb_div_min, fb_div_max, fb_div;
	unsigned post_div_min, post_div_max, post_div;
	unsigned ref_div_min, ref_div_max, ref_div;
	unsigned post_div_best, diff_best;
	unsigned nom, den;

	/* determine allowed feedback divider range */
	fb_div_min = pll->min_feedback_div;
	fb_div_max = pll->max_feedback_div;

	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		fb_div_min *= 10;
		fb_div_max *= 10;
	}

	/* determine allowed ref divider range */
	if (pll->flags & RADEON_PLL_USE_REF_DIV)
		ref_div_min = pll->reference_div;
	else
		ref_div_min = pll->min_ref_div;

	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV &&
	    pll->flags & RADEON_PLL_USE_REF_DIV)
		ref_div_max = pll->reference_div;
	else if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP)
		/* keep the reference divider small in this mode */
		ref_div_max = min(pll->max_ref_div, 7u);
	else
		ref_div_max = pll->max_ref_div;

	/* determine allowed post divider range */
	if (pll->flags & RADEON_PLL_USE_POST_DIV) {
		post_div_min = pll->post_div;
		post_div_max = pll->post_div;
	} else {
		unsigned vco_min, vco_max;

		if (pll->flags & RADEON_PLL_IS_LCD) {
			vco_min = pll->lcd_pll_out_min;
			vco_max = pll->lcd_pll_out_max;
		} else {
			vco_min = pll->pll_out_min;
			vco_max = pll->pll_out_max;
		}

		if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
			vco_min *= 10;
			vco_max *= 10;
		}

		/* smallest post divider keeping the VCO above its minimum */
		post_div_min = vco_min / target_clock;
		if ((target_clock * post_div_min) < vco_min)
			++post_div_min;
		if (post_div_min < pll->min_post_div)
			post_div_min = pll->min_post_div;

		/* largest post divider keeping the VCO below its maximum */
		post_div_max = vco_max / target_clock;
		if ((target_clock * post_div_max) > vco_max)
			--post_div_max;
		if (post_div_max > pll->max_post_div)
			post_div_max = pll->max_post_div;
	}

	/* represent the desired ratio as nom/den of the reference clock */
	nom = target_clock;
	den = pll->reference_freq;

	/* reduce the numbers to a sane range */
	avivo_reduce_ratio(&nom, &den, fb_div_min, post_div_min);

	/* pick the post divider with the smallest frequency deviation */
	if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP)
		post_div_best = post_div_min;
	else
		post_div_best = post_div_max;
	diff_best = ~0;

	for (post_div = post_div_min; post_div <= post_div_max; ++post_div) {
		unsigned diff;
		avivo_get_fb_ref_div(nom, den, post_div, fb_div_max,
				     ref_div_max, &fb_div, &ref_div);
		diff = abs(target_clock - (pll->reference_freq * fb_div) /
			   (ref_div * post_div));

		if (diff < diff_best || (diff == diff_best &&
					 !(pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP))) {
			/* on a tie, larger post dividers win unless
			 * MINM_OVER_MAXP is requested */
			post_div_best = post_div;
			diff_best = diff;
		}
	}
	post_div = post_div_best;

	/* recompute fb and ref divs with the chosen post divider */
	avivo_get_fb_ref_div(nom, den, post_div, fb_div_max, ref_div_max,
			     &fb_div, &ref_div);

	/* reduce the dividers while keeping them above their minimums */
	avivo_reduce_ratio(&fb_div, &ref_div, fb_div_min, ref_div_min);

	/* avoid problematic small fractional feedback dividers by scaling
	 * up when the fractional part would otherwise be unstable */
	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV && (fb_div % 10)) {
		fb_div_min = max(fb_div_min, (9 - (fb_div % 10)) * 20 + 50);
		if (fb_div < fb_div_min) {
			unsigned tmp = DIV_ROUND_UP(fb_div_min, fb_div);
			fb_div *= tmp;
			ref_div *= tmp;
		}
	}

	/* split the internal 10x feedback divider into int + frac parts */
	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		*fb_div_p = fb_div / 10;
		*frac_fb_div_p = fb_div % 10;
	} else {
		*fb_div_p = fb_div;
		*frac_fb_div_p = 0;
	}

	/* actual clock produced by the chosen dividers */
	*dot_clock_p = ((pll->reference_freq * *fb_div_p * 10) +
			(pll->reference_freq * *frac_fb_div_p)) /
		       (ref_div * post_div * 10);
	*ref_div_p = ref_div;
	*post_div_p = post_div;

	DRM_DEBUG_KMS("%d - %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
		      freq, *dot_clock_p * 10, *fb_div_p, *frac_fb_div_p,
		      ref_div, post_div);
}
1091
1092
1093static inline uint32_t radeon_div(uint64_t n, uint32_t d)
1094{
1095 uint64_t mod;
1096
1097 n += d / 2;
1098
1099 mod = do_div(n, d);
1100 return n;
1101}
1102
1103void radeon_compute_pll_legacy(struct radeon_pll *pll,
1104 uint64_t freq,
1105 uint32_t *dot_clock_p,
1106 uint32_t *fb_div_p,
1107 uint32_t *frac_fb_div_p,
1108 uint32_t *ref_div_p,
1109 uint32_t *post_div_p)
1110{
1111 uint32_t min_ref_div = pll->min_ref_div;
1112 uint32_t max_ref_div = pll->max_ref_div;
1113 uint32_t min_post_div = pll->min_post_div;
1114 uint32_t max_post_div = pll->max_post_div;
1115 uint32_t min_fractional_feed_div = 0;
1116 uint32_t max_fractional_feed_div = 0;
1117 uint32_t best_vco = pll->best_vco;
1118 uint32_t best_post_div = 1;
1119 uint32_t best_ref_div = 1;
1120 uint32_t best_feedback_div = 1;
1121 uint32_t best_frac_feedback_div = 0;
1122 uint32_t best_freq = -1;
1123 uint32_t best_error = 0xffffffff;
1124 uint32_t best_vco_diff = 1;
1125 uint32_t post_div;
1126 u32 pll_out_min, pll_out_max;
1127
1128 DRM_DEBUG_KMS("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
1129 freq = freq * 1000;
1130
1131 if (pll->flags & RADEON_PLL_IS_LCD) {
1132 pll_out_min = pll->lcd_pll_out_min;
1133 pll_out_max = pll->lcd_pll_out_max;
1134 } else {
1135 pll_out_min = pll->pll_out_min;
1136 pll_out_max = pll->pll_out_max;
1137 }
1138
1139 if (pll_out_min > 64800)
1140 pll_out_min = 64800;
1141
1142 if (pll->flags & RADEON_PLL_USE_REF_DIV)
1143 min_ref_div = max_ref_div = pll->reference_div;
1144 else {
1145 while (min_ref_div < max_ref_div-1) {
1146 uint32_t mid = (min_ref_div + max_ref_div) / 2;
1147 uint32_t pll_in = pll->reference_freq / mid;
1148 if (pll_in < pll->pll_in_min)
1149 max_ref_div = mid;
1150 else if (pll_in > pll->pll_in_max)
1151 min_ref_div = mid;
1152 else
1153 break;
1154 }
1155 }
1156
1157 if (pll->flags & RADEON_PLL_USE_POST_DIV)
1158 min_post_div = max_post_div = pll->post_div;
1159
1160 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
1161 min_fractional_feed_div = pll->min_frac_feedback_div;
1162 max_fractional_feed_div = pll->max_frac_feedback_div;
1163 }
1164
1165 for (post_div = max_post_div; post_div >= min_post_div; --post_div) {
1166 uint32_t ref_div;
1167
1168 if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
1169 continue;
1170
1171
1172 if (pll->flags & RADEON_PLL_LEGACY) {
1173 if ((post_div == 5) ||
1174 (post_div == 7) ||
1175 (post_div == 9) ||
1176 (post_div == 10) ||
1177 (post_div == 11) ||
1178 (post_div == 13) ||
1179 (post_div == 14) ||
1180 (post_div == 15))
1181 continue;
1182 }
1183
1184 for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) {
1185 uint32_t feedback_div, current_freq = 0, error, vco_diff;
1186 uint32_t pll_in = pll->reference_freq / ref_div;
1187 uint32_t min_feed_div = pll->min_feedback_div;
1188 uint32_t max_feed_div = pll->max_feedback_div + 1;
1189
1190 if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max)
1191 continue;
1192
1193 while (min_feed_div < max_feed_div) {
1194 uint32_t vco;
1195 uint32_t min_frac_feed_div = min_fractional_feed_div;
1196 uint32_t max_frac_feed_div = max_fractional_feed_div + 1;
1197 uint32_t frac_feedback_div;
1198 uint64_t tmp;
1199
1200 feedback_div = (min_feed_div + max_feed_div) / 2;
1201
1202 tmp = (uint64_t)pll->reference_freq * feedback_div;
1203 vco = radeon_div(tmp, ref_div);
1204
1205 if (vco < pll_out_min) {
1206 min_feed_div = feedback_div + 1;
1207 continue;
1208 } else if (vco > pll_out_max) {
1209 max_feed_div = feedback_div;
1210 continue;
1211 }
1212
1213 while (min_frac_feed_div < max_frac_feed_div) {
1214 frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2;
1215 tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div;
1216 tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
1217 current_freq = radeon_div(tmp, ref_div * post_div);
1218
1219 if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
1220 if (freq < current_freq)
1221 error = 0xffffffff;
1222 else
1223 error = freq - current_freq;
1224 } else
1225 error = abs(current_freq - freq);
1226 vco_diff = abs(vco - best_vco);
1227
1228 if ((best_vco == 0 && error < best_error) ||
1229 (best_vco != 0 &&
1230 ((best_error > 100 && error < best_error - 100) ||
1231 (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) {
1232 best_post_div = post_div;
1233 best_ref_div = ref_div;
1234 best_feedback_div = feedback_div;
1235 best_frac_feedback_div = frac_feedback_div;
1236 best_freq = current_freq;
1237 best_error = error;
1238 best_vco_diff = vco_diff;
1239 } else if (current_freq == freq) {
1240 if (best_freq == -1) {
1241 best_post_div = post_div;
1242 best_ref_div = ref_div;
1243 best_feedback_div = feedback_div;
1244 best_frac_feedback_div = frac_feedback_div;
1245 best_freq = current_freq;
1246 best_error = error;
1247 best_vco_diff = vco_diff;
1248 } else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
1249 ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
1250 ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
1251 ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
1252 ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
1253 ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
1254 best_post_div = post_div;
1255 best_ref_div = ref_div;
1256 best_feedback_div = feedback_div;
1257 best_frac_feedback_div = frac_feedback_div;
1258 best_freq = current_freq;
1259 best_error = error;
1260 best_vco_diff = vco_diff;
1261 }
1262 }
1263 if (current_freq < freq)
1264 min_frac_feed_div = frac_feedback_div + 1;
1265 else
1266 max_frac_feed_div = frac_feedback_div;
1267 }
1268 if (current_freq < freq)
1269 min_feed_div = feedback_div + 1;
1270 else
1271 max_feed_div = feedback_div;
1272 }
1273 }
1274 }
1275
1276 *dot_clock_p = best_freq / 10000;
1277 *fb_div_p = best_feedback_div;
1278 *frac_fb_div_p = best_frac_feedback_div;
1279 *ref_div_p = best_ref_div;
1280 *post_div_p = best_post_div;
1281 DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1282 (long long)freq,
1283 best_freq / 1000, best_feedback_div, best_frac_feedback_div,
1284 best_ref_div, best_post_div);
1285
1286}
1287
1288static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb)
1289{
1290 struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
1291
1292 drm_gem_object_put_unlocked(radeon_fb->obj);
1293 drm_framebuffer_cleanup(fb);
1294 kfree(radeon_fb);
1295}
1296
1297static int radeon_user_framebuffer_create_handle(struct drm_framebuffer *fb,
1298 struct drm_file *file_priv,
1299 unsigned int *handle)
1300{
1301 struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
1302
1303 return drm_gem_handle_create(file_priv, radeon_fb->obj, handle);
1304}
1305
/* Framebuffer vtable: teardown and GEM-handle export for radeon fbs. */
static const struct drm_framebuffer_funcs radeon_fb_funcs = {
	.destroy = radeon_user_framebuffer_destroy,
	.create_handle = radeon_user_framebuffer_create_handle,
};
1310
1311int
1312radeon_framebuffer_init(struct drm_device *dev,
1313 struct radeon_framebuffer *rfb,
1314 const struct drm_mode_fb_cmd2 *mode_cmd,
1315 struct drm_gem_object *obj)
1316{
1317 int ret;
1318 rfb->obj = obj;
1319 drm_helper_mode_fill_fb_struct(dev, &rfb->base, mode_cmd);
1320 ret = drm_framebuffer_init(dev, &rfb->base, &radeon_fb_funcs);
1321 if (ret) {
1322 rfb->obj = NULL;
1323 return ret;
1324 }
1325 return 0;
1326}
1327
1328static struct drm_framebuffer *
1329radeon_user_framebuffer_create(struct drm_device *dev,
1330 struct drm_file *file_priv,
1331 const struct drm_mode_fb_cmd2 *mode_cmd)
1332{
1333 struct drm_gem_object *obj;
1334 struct radeon_framebuffer *radeon_fb;
1335 int ret;
1336
1337 obj = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
1338 if (obj == NULL) {
1339 dev_err(&dev->pdev->dev, "No GEM object associated to handle 0x%08X, "
1340 "can't create framebuffer\n", mode_cmd->handles[0]);
1341 return ERR_PTR(-ENOENT);
1342 }
1343
1344
1345 if (obj->import_attach) {
1346 DRM_DEBUG_KMS("Cannot create framebuffer from imported dma_buf\n");
1347 return ERR_PTR(-EINVAL);
1348 }
1349
1350 radeon_fb = kzalloc(sizeof(*radeon_fb), GFP_KERNEL);
1351 if (radeon_fb == NULL) {
1352 drm_gem_object_put_unlocked(obj);
1353 return ERR_PTR(-ENOMEM);
1354 }
1355
1356 ret = radeon_framebuffer_init(dev, radeon_fb, mode_cmd, obj);
1357 if (ret) {
1358 kfree(radeon_fb);
1359 drm_gem_object_put_unlocked(obj);
1360 return ERR_PTR(ret);
1361 }
1362
1363 return &radeon_fb->base;
1364}
1365
/* Mode-config vtable: fb creation plus output-poll change notification. */
static const struct drm_mode_config_funcs radeon_mode_funcs = {
	.fb_create = radeon_user_framebuffer_create,
	.output_poll_changed = drm_fb_helper_output_poll_changed,
};
1370
/* Value/name tables for the driver's connector properties, registered in
 * radeon_modeset_create_props() below.
 */

/* TMDS PLL programming source (legacy, pre-AVIVO ASICs only). */
static const struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
{	{ 0, "driver" },
	{ 1, "bios" },
};

/* Analog TV standards selectable via the "tv standard" property. */
static const struct drm_prop_enum_list radeon_tv_std_enum_list[] =
{	{ TV_STD_NTSC, "ntsc" },
	{ TV_STD_PAL, "pal" },
	{ TV_STD_PAL_M, "pal-m" },
	{ TV_STD_PAL_60, "pal-60" },
	{ TV_STD_NTSC_J, "ntsc-j" },
	{ TV_STD_SCART_PAL, "scart-pal" },
	{ TV_STD_PAL_CN, "pal-cn" },
	{ TV_STD_SECAM, "secam" },
};

/* HDMI underscan modes for the "underscan" property. */
static const struct drm_prop_enum_list radeon_underscan_enum_list[] =
{	{ UNDERSCAN_OFF, "off" },
	{ UNDERSCAN_ON, "on" },
	{ UNDERSCAN_AUTO, "auto" },
};

/* Audio enablement for the "audio" property. */
static const struct drm_prop_enum_list radeon_audio_enum_list[] =
{	{ RADEON_AUDIO_DISABLE, "off" },
	{ RADEON_AUDIO_ENABLE, "on" },
	{ RADEON_AUDIO_AUTO, "auto" },
};

/* Dithering on/off for the "dither" property. */
static const struct drm_prop_enum_list radeon_dither_enum_list[] =
{	{ RADEON_FMT_DITHER_DISABLE, "off" },
	{ RADEON_FMT_DITHER_ENABLE, "on" },
};

/* Output color-space conversion modes for the "output_csc" property. */
static const struct drm_prop_enum_list radeon_output_csc_enum_list[] =
{	{ RADEON_OUTPUT_CSC_BYPASS, "bypass" },
	{ RADEON_OUTPUT_CSC_TVRGB, "tvrgb" },
	{ RADEON_OUTPUT_CSC_YCBCR601, "ycbcr601" },
	{ RADEON_OUTPUT_CSC_YCBCR709, "ycbcr709" },
};
1411
1412static int radeon_modeset_create_props(struct radeon_device *rdev)
1413{
1414 int sz;
1415
1416 if (rdev->is_atom_bios) {
1417 rdev->mode_info.coherent_mode_property =
1418 drm_property_create_range(rdev->ddev, 0 , "coherent", 0, 1);
1419 if (!rdev->mode_info.coherent_mode_property)
1420 return -ENOMEM;
1421 }
1422
1423 if (!ASIC_IS_AVIVO(rdev)) {
1424 sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
1425 rdev->mode_info.tmds_pll_property =
1426 drm_property_create_enum(rdev->ddev, 0,
1427 "tmds_pll",
1428 radeon_tmds_pll_enum_list, sz);
1429 }
1430
1431 rdev->mode_info.load_detect_property =
1432 drm_property_create_range(rdev->ddev, 0, "load detection", 0, 1);
1433 if (!rdev->mode_info.load_detect_property)
1434 return -ENOMEM;
1435
1436 drm_mode_create_scaling_mode_property(rdev->ddev);
1437
1438 sz = ARRAY_SIZE(radeon_tv_std_enum_list);
1439 rdev->mode_info.tv_std_property =
1440 drm_property_create_enum(rdev->ddev, 0,
1441 "tv standard",
1442 radeon_tv_std_enum_list, sz);
1443
1444 sz = ARRAY_SIZE(radeon_underscan_enum_list);
1445 rdev->mode_info.underscan_property =
1446 drm_property_create_enum(rdev->ddev, 0,
1447 "underscan",
1448 radeon_underscan_enum_list, sz);
1449
1450 rdev->mode_info.underscan_hborder_property =
1451 drm_property_create_range(rdev->ddev, 0,
1452 "underscan hborder", 0, 128);
1453 if (!rdev->mode_info.underscan_hborder_property)
1454 return -ENOMEM;
1455
1456 rdev->mode_info.underscan_vborder_property =
1457 drm_property_create_range(rdev->ddev, 0,
1458 "underscan vborder", 0, 128);
1459 if (!rdev->mode_info.underscan_vborder_property)
1460 return -ENOMEM;
1461
1462 sz = ARRAY_SIZE(radeon_audio_enum_list);
1463 rdev->mode_info.audio_property =
1464 drm_property_create_enum(rdev->ddev, 0,
1465 "audio",
1466 radeon_audio_enum_list, sz);
1467
1468 sz = ARRAY_SIZE(radeon_dither_enum_list);
1469 rdev->mode_info.dither_property =
1470 drm_property_create_enum(rdev->ddev, 0,
1471 "dither",
1472 radeon_dither_enum_list, sz);
1473
1474 sz = ARRAY_SIZE(radeon_output_csc_enum_list);
1475 rdev->mode_info.output_csc_property =
1476 drm_property_create_enum(rdev->ddev, 0,
1477 "output_csc",
1478 radeon_output_csc_enum_list, sz);
1479
1480 return 0;
1481}
1482
1483void radeon_update_display_priority(struct radeon_device *rdev)
1484{
1485
1486 if ((radeon_disp_priority == 0) || (radeon_disp_priority > 2)) {
1487
1488
1489
1490
1491
1492
1493 if ((ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515)) &&
1494 !(rdev->flags & RADEON_IS_IGP))
1495 rdev->disp_priority = 2;
1496 else
1497 rdev->disp_priority = 0;
1498 } else
1499 rdev->disp_priority = radeon_disp_priority;
1500
1501}
1502
1503
1504
1505
1506static void radeon_afmt_init(struct radeon_device *rdev)
1507{
1508 int i;
1509
1510 for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++)
1511 rdev->mode_info.afmt[i] = NULL;
1512
1513 if (ASIC_IS_NODCE(rdev)) {
1514
1515 } else if (ASIC_IS_DCE4(rdev)) {
1516 static uint32_t eg_offsets[] = {
1517 EVERGREEN_CRTC0_REGISTER_OFFSET,
1518 EVERGREEN_CRTC1_REGISTER_OFFSET,
1519 EVERGREEN_CRTC2_REGISTER_OFFSET,
1520 EVERGREEN_CRTC3_REGISTER_OFFSET,
1521 EVERGREEN_CRTC4_REGISTER_OFFSET,
1522 EVERGREEN_CRTC5_REGISTER_OFFSET,
1523 0x13830 - 0x7030,
1524 };
1525 int num_afmt;
1526
1527
1528
1529
1530
1531 if (ASIC_IS_DCE8(rdev))
1532 num_afmt = 7;
1533 else if (ASIC_IS_DCE6(rdev))
1534 num_afmt = 6;
1535 else if (ASIC_IS_DCE5(rdev))
1536 num_afmt = 6;
1537 else if (ASIC_IS_DCE41(rdev))
1538 num_afmt = 2;
1539 else
1540 num_afmt = 6;
1541
1542 BUG_ON(num_afmt > ARRAY_SIZE(eg_offsets));
1543 for (i = 0; i < num_afmt; i++) {
1544 rdev->mode_info.afmt[i] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1545 if (rdev->mode_info.afmt[i]) {
1546 rdev->mode_info.afmt[i]->offset = eg_offsets[i];
1547 rdev->mode_info.afmt[i]->id = i;
1548 }
1549 }
1550 } else if (ASIC_IS_DCE3(rdev)) {
1551
1552 rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1553 if (rdev->mode_info.afmt[0]) {
1554 rdev->mode_info.afmt[0]->offset = DCE3_HDMI_OFFSET0;
1555 rdev->mode_info.afmt[0]->id = 0;
1556 }
1557 rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1558 if (rdev->mode_info.afmt[1]) {
1559 rdev->mode_info.afmt[1]->offset = DCE3_HDMI_OFFSET1;
1560 rdev->mode_info.afmt[1]->id = 1;
1561 }
1562 } else if (ASIC_IS_DCE2(rdev)) {
1563
1564 rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1565 if (rdev->mode_info.afmt[0]) {
1566 rdev->mode_info.afmt[0]->offset = DCE2_HDMI_OFFSET0;
1567 rdev->mode_info.afmt[0]->id = 0;
1568 }
1569
1570 if (rdev->family >= CHIP_R600) {
1571 rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1572 if (rdev->mode_info.afmt[1]) {
1573 rdev->mode_info.afmt[1]->offset = DCE2_HDMI_OFFSET1;
1574 rdev->mode_info.afmt[1]->id = 1;
1575 }
1576 }
1577 }
1578}
1579
1580static void radeon_afmt_fini(struct radeon_device *rdev)
1581{
1582 int i;
1583
1584 for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++) {
1585 kfree(rdev->mode_info.afmt[i]);
1586 rdev->mode_info.afmt[i] = NULL;
1587 }
1588}
1589
/*
 * radeon_modeset_init - bring up the whole KMS side of the driver.
 *
 * Ordering matters here: mode-config and properties first, then i2c,
 * CRTCs, encoders/connectors, hpd, audio, fbdev and finally output
 * polling and PM.  Returns 0 on success or a negative error code.
 */
int radeon_modeset_init(struct radeon_device *rdev)
{
	int i;
	int ret;

	drm_mode_config_init(rdev->ddev);
	/* Remembered so radeon_modeset_fini() only tears down what exists. */
	rdev->mode_info.mode_config_initialized = true;

	rdev->ddev->mode_config.funcs = &radeon_mode_funcs;

	if (radeon_use_pflipirq == 2 && rdev->family >= CHIP_R600)
		rdev->ddev->mode_config.async_page_flip = true;

	/* Maximum fb dimensions scale with the display engine generation. */
	if (ASIC_IS_DCE5(rdev)) {
		rdev->ddev->mode_config.max_width = 16384;
		rdev->ddev->mode_config.max_height = 16384;
	} else if (ASIC_IS_AVIVO(rdev)) {
		rdev->ddev->mode_config.max_width = 8192;
		rdev->ddev->mode_config.max_height = 8192;
	} else {
		rdev->ddev->mode_config.max_width = 4096;
		rdev->ddev->mode_config.max_height = 4096;
	}

	rdev->ddev->mode_config.preferred_depth = 24;
	rdev->ddev->mode_config.prefer_shadow = 1;

	rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;

	ret = radeon_modeset_create_props(rdev);
	if (ret) {
		return ret;
	}

	/* i2c buses must exist before connectors probe EDID over them. */
	radeon_i2c_init(rdev);

	if (!rdev->is_atom_bios) {
		/* combios boards may carry a hardcoded EDID for quirky panels */
		radeon_combios_check_hardcoded_edid(rdev);
	}

	/* allocate crtcs */
	for (i = 0; i < rdev->num_crtc; i++) {
		radeon_crtc_init(rdev->ddev, i);
	}

	/* NOTE(review): radeon_setup_enc_conn() appears to return nonzero on
	 * success, so this propagates 0 (success) on failure — TODO confirm
	 * whether that is intentional.
	 */
	ret = radeon_setup_enc_conn(rdev->ddev);
	if (!ret) {
		return ret;
	}

	/* init dig PHYs, disp eng pll */
	if (rdev->is_atom_bios) {
		radeon_atom_encoder_init(rdev);
		radeon_atom_disp_eng_pll_init(rdev);
	}

	/* hook hotplug-detect interrupts up after connectors exist */
	radeon_hpd_init(rdev);

	/* audio/HDMI packet blocks */
	radeon_afmt_init(rdev);

	radeon_fbdev_init(rdev);
	drm_kms_helper_poll_init(rdev->ddev);

	/* NOTE(review): the radeon_pm_late_init() result is deliberately
	 * ignored here; modeset init succeeds even if late PM setup fails.
	 */
	ret = radeon_pm_late_init(rdev);

	return 0;
}
1664
/*
 * radeon_modeset_fini - tear down the KMS side in reverse init order.
 *
 * The mode-config block only runs if radeon_modeset_init() got far enough
 * to set mode_config_initialized; EDID and i2c cleanup always run.
 */
void radeon_modeset_fini(struct radeon_device *rdev)
{
	if (rdev->mode_info.mode_config_initialized) {
		drm_kms_helper_poll_fini(rdev->ddev);
		radeon_hpd_fini(rdev);
		/* Shut all CRTCs off before freeing their state. */
		drm_crtc_force_disable_all(rdev->ddev);
		radeon_fbdev_fini(rdev);
		radeon_afmt_fini(rdev);
		drm_mode_config_cleanup(rdev->ddev);
		rdev->mode_info.mode_config_initialized = false;
	}

	/* kfree(NULL) is safe if no hardcoded EDID was ever allocated. */
	kfree(rdev->mode_info.bios_hardcoded_edid);

	/* free i2c buses */
	radeon_i2c_fini(rdev);
}
1682
1683static bool is_hdtv_mode(const struct drm_display_mode *mode)
1684{
1685
1686 if ((mode->vdisplay == 480 && mode->hdisplay == 720) ||
1687 (mode->vdisplay == 576) ||
1688 (mode->vdisplay == 720) ||
1689 (mode->vdisplay == 1080))
1690 return true;
1691 else
1692 return false;
1693}
1694
/*
 * radeon_crtc_scaling_mode_fixup - resolve RMX scaling for a CRTC's encoders.
 *
 * Picks the CRTC's rmx_type, copies the native mode, applies HDMI
 * underscan borders on AVIVO parts and computes the h/v scaling factors.
 * Returns false if two encoders on the same CRTC request different
 * scaling, true otherwise.
 */
bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
				    const struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_encoder *radeon_encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	bool first = true;
	u32 src_v = 1, dst_v = 1;
	u32 src_h = 1, dst_h = 1;

	radeon_crtc->h_border = 0;
	radeon_crtc->v_border = 0;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc != crtc)
			continue;
		radeon_encoder = to_radeon_encoder(encoder);
		connector = radeon_get_connector_for_encoder(encoder);
		/* NOTE(review): radeon_connector is assigned but not read
		 * below in this view. */
		radeon_connector = to_radeon_connector(connector);

		if (first) {
			/* Scale only when the requested mode is smaller
			 * than the encoder's native mode. */
			if (radeon_encoder->rmx_type == RMX_OFF)
				radeon_crtc->rmx_type = RMX_OFF;
			else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
				 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
				radeon_crtc->rmx_type = radeon_encoder->rmx_type;
			else
				radeon_crtc->rmx_type = RMX_OFF;
			/* copy native mode */
			memcpy(&radeon_crtc->native_mode,
				&radeon_encoder->native_mode,
				sizeof(struct drm_display_mode));
			src_v = crtc->mode.vdisplay;
			dst_v = radeon_crtc->native_mode.vdisplay;
			src_h = crtc->mode.hdisplay;
			dst_h = radeon_crtc->native_mode.hdisplay;

			/* Underscan: only on AVIVO+, non-interlaced modes,
			 * when forced on or when auto-detected for an HDMI
			 * monitor showing an HDTV mode. */
			if (ASIC_IS_AVIVO(rdev) &&
			    (!(mode->flags & DRM_MODE_FLAG_INTERLACE)) &&
			    ((radeon_encoder->underscan_type == UNDERSCAN_ON) ||
			     ((radeon_encoder->underscan_type == UNDERSCAN_AUTO) &&
			      drm_detect_hdmi_monitor(radeon_connector_edid(connector)) &&
			      is_hdtv_mode(mode)))) {
				/* explicit border, or default of
				 * hdisplay/32 + 16 pixels per side */
				if (radeon_encoder->underscan_hborder != 0)
					radeon_crtc->h_border = radeon_encoder->underscan_hborder;
				else
					radeon_crtc->h_border = (mode->hdisplay >> 5) + 16;
				if (radeon_encoder->underscan_vborder != 0)
					radeon_crtc->v_border = radeon_encoder->underscan_vborder;
				else
					radeon_crtc->v_border = (mode->vdisplay >> 5) + 16;
				radeon_crtc->rmx_type = RMX_FULL;
				/* shrink destination by the borders */
				src_v = crtc->mode.vdisplay;
				dst_v = crtc->mode.vdisplay - (radeon_crtc->v_border * 2);
				src_h = crtc->mode.hdisplay;
				dst_h = crtc->mode.hdisplay - (radeon_crtc->h_border * 2);
			}
			first = false;
		} else {
			/* Every further encoder on this CRTC must agree
			 * with the scaling chosen for the first one. */
			if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
				DRM_ERROR("Scaling not consistent across encoder.\n");
				return false;
			}
		}
	}
	/* Fixed-point src/dst ratios used by the RMX hardware setup. */
	if (radeon_crtc->rmx_type != RMX_OFF) {
		fixed20_12 a, b;
		a.full = dfixed_const(src_v);
		b.full = dfixed_const(dst_v);
		radeon_crtc->vsc.full = dfixed_div(a, b);
		a.full = dfixed_const(src_h);
		b.full = dfixed_const(dst_h);
		radeon_crtc->hsc.full = dfixed_div(a, b);
	} else {
		radeon_crtc->vsc.full = dfixed_const(1);
		radeon_crtc->hsc.full = dfixed_const(1);
	}
	return true;
}
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
1822
1823
1824
/*
 * radeon_get_crtc_scanoutpos - sample the current scanout position of a CRTC.
 *
 * Reads the hardware position registers for @pipe and returns the vertical
 * and horizontal scanout position in *vpos / *hpos.  Positions inside
 * vblank are returned as negative distances to the start of active scanout.
 * Optionally samples timestamps immediately before (*stime) and after
 * (*etime) the register reads so callers can bound the query latency.
 * The returned flags include DRM_SCANOUTPOS_VALID, _ACCURATE and
 * _IN_VBLANK as applicable.
 */
int radeon_get_crtc_scanoutpos(struct drm_device *dev, unsigned int pipe,
			       unsigned int flags, int *vpos, int *hpos,
			       ktime_t *stime, ktime_t *etime,
			       const struct drm_display_mode *mode)
{
	u32 stat_crtc = 0, vbl = 0, position = 0;
	int vbl_start, vbl_end, vtotal, ret = 0;
	bool in_vbl = true;

	struct radeon_device *rdev = dev->dev_private;

	/* Timestamp taken right before the register reads. */
	if (stime)
		*stime = ktime_get();

	if (ASIC_IS_DCE4(rdev)) {
		/* DCE4+: per-CRTC register blocks for up to six pipes. */
		if (pipe == 0) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC0_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC0_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (pipe == 1) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC1_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC1_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (pipe == 2) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC2_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC2_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (pipe == 3) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC3_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC3_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (pipe == 4) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC4_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC4_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (pipe == 5) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC5_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC5_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
	} else if (ASIC_IS_AVIVO(rdev)) {
		/* AVIVO: two pipes with fixed register addresses. */
		if (pipe == 0) {
			vbl = RREG32(AVIVO_D1CRTC_V_BLANK_START_END);
			position = RREG32(AVIVO_D1CRTC_STATUS_POSITION);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (pipe == 1) {
			vbl = RREG32(AVIVO_D2CRTC_V_BLANK_START_END);
			position = RREG32(AVIVO_D2CRTC_STATUS_POSITION);
			ret |= DRM_SCANOUTPOS_VALID;
		}
	} else {
		/* Pre-AVIVO: vbl start comes from V_TOTAL_DISP, only a
		 * vertical position is available (hpos stays 0), and the
		 * CRTC status bit refines the vblank detection. */
		if (pipe == 0) {
			vbl = (RREG32(RADEON_CRTC_V_TOTAL_DISP) &
			       RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
			position = (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
			stat_crtc = RREG32(RADEON_CRTC_STATUS);
			if (!(stat_crtc & 1))
				in_vbl = false;

			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (pipe == 1) {
			vbl = (RREG32(RADEON_CRTC2_V_TOTAL_DISP) &
			       RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
			position = (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
			stat_crtc = RREG32(RADEON_CRTC2_STATUS);
			if (!(stat_crtc & 1))
				in_vbl = false;

			ret |= DRM_SCANOUTPOS_VALID;
		}
	}

	/* Timestamp taken right after the register reads. */
	if (etime)
		*etime = ktime_get();

	/* Position register packs vpos in the low and hpos in the high
	 * 16 bits; 13 bits of each are significant. */
	*vpos = position & 0x1fff;
	*hpos = (position >> 16) & 0x1fff;

	if (vbl > 0) {
		/* Hardware told us vblank start/end, so the result is
		 * accurate: start in low 16 bits, end in high 16 bits. */
		ret |= DRM_SCANOUTPOS_ACCURATE;
		vbl_start = vbl & 0x1fff;
		vbl_end = (vbl >> 16) & 0x1fff;
	}
	else {
		/* No register data: fall back to the mode's vdisplay as
		 * vblank start and assume vblank ends at line 0. */
		vbl_start = mode->crtc_vdisplay;
		vbl_end = 0;
	}

	/* Caller only wants distance to real vblank start: encode the
	 * raw distance in *hpos before *vpos is adjusted below. */
	if (flags & GET_DISTANCE_TO_VBLANKSTART) {
		*hpos = *vpos - vbl_start;
	}

	/* Unless the caller asked for the real vblank start, move it up
	 * by the line-buffer lead lines, so "vblank" effectively starts
	 * when the display controller stops fetching from memory. */
	if (!(flags & USE_REAL_VBLANKSTART))
		vbl_start -= rdev->mode_info.crtcs[pipe]->lb_vblank_lead_lines;

	/* In the active scanout area? */
	if ((*vpos < vbl_start) && (*vpos >= vbl_end))
		in_vbl = false;

	if (in_vbl)
		ret |= DRM_SCANOUTPOS_IN_VBLANK;

	/* Distance-to-vblankstart mode: report vpos relative to vblank
	 * start and skip the sign-flip normalization below. */
	if (flags & GET_DISTANCE_TO_VBLANKSTART) {
		*vpos -= vbl_start;
		return ret;
	}

	/* Inside vblank but past its start: wrap the position so it
	 * counts negatively toward the start of active scanout. */
	if (in_vbl && (*vpos >= vbl_start)) {
		vtotal = mode->crtc_vtotal;
		*vpos = *vpos - vtotal;
	}

	/* Normalize so *vpos == 0 at the first active scanline. */
	*vpos = *vpos - vbl_end;

	return ret;
}
1998