#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"

#include "atom.h"
#include <asm/div64.h>

#include <linux/pm_runtime.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_edid.h>

#include <linux/gcd.h>

static void avivo_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUT_RW_MODE, 0);
	WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);

	/* program all 256 gamma LUT entries for this crtc */
	WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(AVIVO_DC_LUT_30_COLOR,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}

	/* only touch bit 0, which selects the LUT used by the graphics surface */
	WREG32_P(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id, ~1);
}

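/*
 * Illustrative note (not from the original source): each LUT programming
 * loop in the *_crtc_load_lut() helpers packs one palette entry as a
 * 10:10:10 word, red in bits 29:20, green in 19:10 and blue in 9:0.
 * For example, with lut_r[i] = 0x3ff, lut_g[i] = 0x200, lut_b[i] = 0x001:
 *
 *	(0x3ff << 20) | (0x200 << 10) | (0x001 << 0) == 0x3ff80001
 */
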
static void dce4_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	for (i = 0; i < 256; i++) {
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}

static void dce5_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);

	/* bypass the input CSC and prescale blocks, use the LUT for input gamma */
	WREG32(NI_INPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS) |
		NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS)));
	WREG32(NI_PRESCALE_GRPH_CONTROL + radeon_crtc->crtc_offset,
	       NI_GRPH_PRESCALE_BYPASS);
	WREG32(NI_PRESCALE_OVL_CONTROL + radeon_crtc->crtc_offset,
	       NI_OVL_PRESCALE_BYPASS);
	WREG32(NI_INPUT_GAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT) |
		NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT)));

	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	for (i = 0; i < 256; i++) {
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}

	/* bypass the rest of the gamma/CSC pipeline, except the output CSC,
	 * which follows the mode selected for this crtc
	 */
	WREG32(NI_DEGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS)));
	WREG32(NI_GAMUT_REMAP_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS) |
		NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS)));
	WREG32(NI_REGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS) |
		NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS)));
	WREG32(NI_OUTPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_OUTPUT_CSC_GRPH_MODE(radeon_crtc->output_csc) |
		NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS)));

	WREG32(0x6940 + radeon_crtc->crtc_offset, 0);
	if (ASIC_IS_DCE8(rdev)) {
		/* enable cursor alpha blending on DCE8 (CIK) parts */
		WREG32(CIK_ALPHA_CONTROL + radeon_crtc->crtc_offset,
		       CIK_CURSOR_ALPHA_BLND_ENA);
	}
}

static void legacy_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;
	uint32_t dac2_cntl;

	/* select which crtc's palette is reached through the DAC palette port */
	dac2_cntl = RREG32(RADEON_DAC_CNTL2);
	if (radeon_crtc->crtc_id == 0)
		dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
	else
		dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
	WREG32(RADEON_DAC_CNTL2, dac2_cntl);

	WREG8(RADEON_PALETTE_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(RADEON_PALETTE_30_DATA,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}

void radeon_crtc_load_lut(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	if (!crtc->enabled)
		return;

	if (ASIC_IS_DCE5(rdev))
		dce5_crtc_load_lut(crtc);
	else if (ASIC_IS_DCE4(rdev))
		dce4_crtc_load_lut(crtc);
	else if (ASIC_IS_AVIVO(rdev))
		avivo_crtc_load_lut(crtc);
	else
		legacy_crtc_load_lut(crtc);
}

/* Sets one color ramp entry on behalf of the fbdev layer */
void radeon_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green,
			      u16 blue, int regno)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	/* drop the low 6 bits: the hardware LUT is 10 bits per channel */
	radeon_crtc->lut_r[regno] = red >> 6;
	radeon_crtc->lut_g[regno] = green >> 6;
	radeon_crtc->lut_b[regno] = blue >> 6;
}

/* Gets one color ramp entry on behalf of the fbdev layer */
void radeon_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green,
			      u16 *blue, int regno)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	*red = radeon_crtc->lut_r[regno] << 6;
	*green = radeon_crtc->lut_g[regno] << 6;
	*blue = radeon_crtc->lut_b[regno] << 6;
}

static int radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
				 u16 *blue, uint32_t size)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	int i;

	for (i = 0; i < size; i++) {
		radeon_crtc->lut_r[i] = red[i] >> 6;
		radeon_crtc->lut_g[i] = green[i] >> 6;
		radeon_crtc->lut_b[i] = blue[i] >> 6;
	}
	radeon_crtc_load_lut(crtc);

	return 0;
}

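/*
 * Illustrative note (not from the original source): the DRM gamma ramp is
 * 16 bits per channel while the hardware LUT entries are 10 bits, so the
 * helpers above simply drop or re-add the low 6 bits.  The round trip is
 * therefore lossy:
 *
 *	0xffff >> 6 == 0x3ff	(value stored in lut_r/g/b)
 *	0x3ff << 6  == 0xffc0	(value reported back, low 6 bits lost)
 */
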
static void radeon_crtc_destroy(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	drm_crtc_cleanup(crtc);
	destroy_workqueue(radeon_crtc->flip_queue);
	kfree(radeon_crtc);
}

/*
 * Unpin the old framebuffer from process context (a work item on the
 * crtc's flip queue) once a page flip has completed.
 */
static void radeon_unpin_work_func(struct work_struct *__work)
{
	struct radeon_flip_work *work =
		container_of(__work, struct radeon_flip_work, unpin_work);
	int r;

	/* unpin of the old buffer */
	r = radeon_bo_reserve(work->old_rbo, false);
	if (likely(r == 0)) {
		r = radeon_bo_unpin(work->old_rbo);
		if (unlikely(r != 0)) {
			DRM_ERROR("failed to unpin buffer after flip\n");
		}
		radeon_bo_unreserve(work->old_rbo);
	} else
		DRM_ERROR("failed to reserve buffer after flip\n");

	drm_gem_object_unreference_unlocked(&work->old_rbo->gem_base);
	kfree(work);
}

void radeon_crtc_handle_vblank(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	unsigned long flags;
	u32 update_pending;
	int vpos, hpos;

	/* can happen during initialization */
	if (radeon_crtc == NULL)
		return;

	/* Skip the polling based pageflip completion check below on asics
	 * with reliable hw pageflip completion irqs (DCE4 and newer), unless
	 * the radeon_use_pflipirq module parameter asks for the polling path.
	 */
	if ((radeon_use_pflipirq == 2) && ASIC_IS_DCE4(rdev))
		return;

	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
	if (radeon_crtc->flip_status != RADEON_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("radeon_crtc->flip_status = %d != "
				 "RADEON_FLIP_SUBMITTED(%d)\n",
				 radeon_crtc->flip_status,
				 RADEON_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}

	update_pending = radeon_page_flip_pending(rdev, crtc_id);

	/* Has the pageflip already completed in the crtc, or is it certain
	 * to complete in this vblank? Query the distance of the current
	 * scanout position to the start of vblank to decide.
	 */
	if (update_pending &&
	    (DRM_SCANOUTPOS_VALID &
	     radeon_get_crtc_scanoutpos(rdev->ddev, crtc_id,
					GET_DISTANCE_TO_VBLANKSTART,
					&vpos, &hpos, NULL, NULL,
					&rdev->mode_info.crtcs[crtc_id]->base.hwmode)) &&
	    ((vpos >= 0 && hpos < 0) || (hpos >= 0 && !ASIC_IS_AVIVO(rdev)))) {
		/* The crtc still reports the flip as pending, but based on
		 * the current scanout position it is guaranteed to complete
		 * before scanout of the next frame starts, so treat it as
		 * completed now.
		 */
		update_pending = 0;
	}
	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
	if (!update_pending)
		radeon_crtc_handle_flip(rdev, crtc_id);
}

/**
 * radeon_crtc_handle_flip - page flip completed
 *
 * @rdev: radeon device pointer
 * @crtc_id: crtc number this event is for
 *
 * Called once we are certain the page flip for this crtc has completed:
 * sends the vblank event, drops the irq/vblank references and queues the
 * unpin work for the old buffer.
 */
void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	struct radeon_flip_work *work;
	unsigned long flags;

	/* this can happen at init */
	if (radeon_crtc == NULL)
		return;

	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
	work = radeon_crtc->flip_work;
	if (radeon_crtc->flip_status != RADEON_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("radeon_crtc->flip_status = %d != "
				 "RADEON_FLIP_SUBMITTED(%d)\n",
				 radeon_crtc->flip_status,
				 RADEON_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}

	/* pageflip completed, clean up */
	radeon_crtc->flip_status = RADEON_FLIP_NONE;
	radeon_crtc->flip_work = NULL;

	/* wake up userspace */
	if (work->event)
		drm_crtc_send_vblank_event(&radeon_crtc->base, work->event);

	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);

	drm_crtc_vblank_put(&radeon_crtc->base);
	radeon_irq_kms_pflip_irq_put(rdev, work->crtc_id);
	queue_work(radeon_crtc->flip_queue, &work->unpin_work);
}

/**
 * radeon_flip_work_func - page flip work handler
 *
 * @__work: kernel work item
 *
 * Waits for the new buffer to become idle, then programs the actual
 * page flip on the crtc at a safe point relative to vblank.
 */
static void radeon_flip_work_func(struct work_struct *__work)
{
	struct radeon_flip_work *work =
		container_of(__work, struct radeon_flip_work, flip_work);
	struct radeon_device *rdev = work->rdev;
	struct drm_device *dev = rdev->ddev;
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[work->crtc_id];

	struct drm_crtc *crtc = &radeon_crtc->base;
	unsigned long flags;
	int r;
	int vpos, hpos;

	down_read(&rdev->exclusive_lock);
	if (work->fence) {
		struct radeon_fence *fence;

		fence = to_radeon_fence(work->fence);
		if (fence && fence->rdev == rdev) {
			r = radeon_fence_wait(fence, false);
			if (r == -EDEADLK) {
				up_read(&rdev->exclusive_lock);
				do {
					r = radeon_gpu_reset(rdev);
				} while (r == -EAGAIN);
				down_read(&rdev->exclusive_lock);
			}
		} else
			r = fence_wait(work->fence, false);

		if (r)
			DRM_ERROR("failed to wait on page flip fence (%d)!\n", r);

		/* We continue with the page flip even if we failed to wait on
		 * the fence, otherwise the DRM core and userspace would get
		 * confused about which BO the crtc is scanning out.
		 */
		fence_put(work->fence);
		work->fence = NULL;
	}

	/* Wait until we're out of the vertical blank period before the one
	 * targeted by the flip, and (on AVIVO and newer) until the target
	 * vblank count has been reached.
	 */
	while (radeon_crtc->enabled &&
	       (radeon_get_crtc_scanoutpos(dev, work->crtc_id, 0,
					   &vpos, &hpos, NULL, NULL,
					   &crtc->hwmode)
		& (DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_IN_VBLANK)) ==
	       (DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_IN_VBLANK) &&
	       (!ASIC_IS_AVIVO(rdev) ||
		((int) (work->target_vblank -
			dev->driver->get_vblank_counter(dev, work->crtc_id)) > 0)))
		usleep_range(1000, 2000);

	/* We borrow the event spin lock for protecting flip_status */
	spin_lock_irqsave(&crtc->dev->event_lock, flags);

	/* set the proper interrupt */
	radeon_irq_kms_pflip_irq_get(rdev, radeon_crtc->crtc_id);

	/* do the flip (mmio) */
	radeon_page_flip(rdev, radeon_crtc->crtc_id, work->base, work->async);

	radeon_crtc->flip_status = RADEON_FLIP_SUBMITTED;
	spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
	up_read(&rdev->exclusive_lock);
}

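/*
 * Illustrative note (not from the original source): the target-vblank test
 * in the wait loop above uses a signed difference so that it keeps working
 * when the 32-bit vblank counter wraps around.  For a hypothetical
 * target_vblank of 5 and a current counter of 0xfffffffe:
 *
 *	(int)(5 - 0xfffffffe) == 7 > 0	keep waiting (target is 7 vblanks away)
 *	(int)(5 - 5)          == 0	stop waiting and program the flip
 */
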
484static int radeon_crtc_page_flip_target(struct drm_crtc *crtc,
485 struct drm_framebuffer *fb,
486 struct drm_pending_vblank_event *event,
487 uint32_t page_flip_flags,
488 uint32_t target)
489{
490 struct drm_device *dev = crtc->dev;
491 struct radeon_device *rdev = dev->dev_private;
492 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
493 struct radeon_framebuffer *old_radeon_fb;
494 struct radeon_framebuffer *new_radeon_fb;
495 struct drm_gem_object *obj;
496 struct radeon_flip_work *work;
497 struct radeon_bo *new_rbo;
498 uint32_t tiling_flags, pitch_pixels;
499 uint64_t base;
500 unsigned long flags;
501 int r;
502
503 work = kzalloc(sizeof *work, GFP_KERNEL);
504 if (work == NULL)
505 return -ENOMEM;
506
507 INIT_WORK(&work->flip_work, radeon_flip_work_func);
508 INIT_WORK(&work->unpin_work, radeon_unpin_work_func);
509
510 work->rdev = rdev;
511 work->crtc_id = radeon_crtc->crtc_id;
512 work->event = event;
513 work->async = (page_flip_flags & DRM_MODE_PAGE_FLIP_ASYNC) != 0;
514
515
516 old_radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
517 obj = old_radeon_fb->obj;
518
519
520 drm_gem_object_reference(obj);
521 work->old_rbo = gem_to_radeon_bo(obj);
522
523 new_radeon_fb = to_radeon_framebuffer(fb);
524 obj = new_radeon_fb->obj;
525 new_rbo = gem_to_radeon_bo(obj);
526
527
528 DRM_DEBUG_DRIVER("flip-ioctl() cur_rbo = %p, new_rbo = %p\n",
529 work->old_rbo, new_rbo);
530
531 r = radeon_bo_reserve(new_rbo, false);
532 if (unlikely(r != 0)) {
533 DRM_ERROR("failed to reserve new rbo buffer before flip\n");
534 goto cleanup;
535 }
536
537 r = radeon_bo_pin_restricted(new_rbo, RADEON_GEM_DOMAIN_VRAM,
538 ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, &base);
539 if (unlikely(r != 0)) {
540 radeon_bo_unreserve(new_rbo);
541 r = -EINVAL;
542 DRM_ERROR("failed to pin new rbo buffer before flip\n");
543 goto cleanup;
544 }
545 work->fence = fence_get(reservation_object_get_excl(new_rbo->tbo.resv));
546 radeon_bo_get_tiling_flags(new_rbo, &tiling_flags, NULL);
547 radeon_bo_unreserve(new_rbo);
548
549 if (!ASIC_IS_AVIVO(rdev)) {
550
551 base -= radeon_crtc->legacy_display_base_addr;
552 pitch_pixels = fb->pitches[0] / (fb->bits_per_pixel / 8);
553
554 if (tiling_flags & RADEON_TILING_MACRO) {
555 if (ASIC_IS_R300(rdev)) {
556 base &= ~0x7ff;
557 } else {
558 int byteshift = fb->bits_per_pixel >> 4;
559 int tile_addr = (((crtc->y >> 3) * pitch_pixels + crtc->x) >> (8 - byteshift)) << 11;
560 base += tile_addr + ((crtc->x << byteshift) % 256) + ((crtc->y % 8) << 8);
561 }
562 } else {
563 int offset = crtc->y * pitch_pixels + crtc->x;
564 switch (fb->bits_per_pixel) {
565 case 8:
566 default:
567 offset *= 1;
568 break;
569 case 15:
570 case 16:
571 offset *= 2;
572 break;
573 case 24:
574 offset *= 3;
575 break;
576 case 32:
577 offset *= 4;
578 break;
579 }
580 base += offset;
581 }
582 base &= ~7;
583 }
584 work->base = base;
585 work->target_vblank = target - drm_crtc_vblank_count(crtc) +
586 dev->driver->get_vblank_counter(dev, work->crtc_id);
587
588
589 spin_lock_irqsave(&crtc->dev->event_lock, flags);
590
591 if (radeon_crtc->flip_status != RADEON_FLIP_NONE) {
592 DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
593 spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
594 r = -EBUSY;
595 goto pflip_cleanup;
596 }
597 radeon_crtc->flip_status = RADEON_FLIP_PENDING;
598 radeon_crtc->flip_work = work;
599
600
601 crtc->primary->fb = fb;
602
603 spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
604
605 queue_work(radeon_crtc->flip_queue, &work->flip_work);
606 return 0;
607
608pflip_cleanup:
609 if (unlikely(radeon_bo_reserve(new_rbo, false) != 0)) {
610 DRM_ERROR("failed to reserve new rbo in error path\n");
611 goto cleanup;
612 }
613 if (unlikely(radeon_bo_unpin(new_rbo) != 0)) {
614 DRM_ERROR("failed to unpin new rbo in error path\n");
615 }
616 radeon_bo_unreserve(new_rbo);
617
618cleanup:
619 drm_gem_object_unreference_unlocked(&work->old_rbo->gem_base);
620 fence_put(work->fence);
621 kfree(work);
622 return r;
623}
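/*
 * Illustrative example (not from the original source) of the pre-AVIVO,
 * non-tiled offset math above, for a hypothetical 32 bpp framebuffer with
 * pitches[0] = 4096 bytes scanned out at crtc->x = 8, crtc->y = 16:
 *
 *	pitch_pixels = 4096 / (32 / 8)   = 1024
 *	offset       = 16 * 1024 + 8     = 16392 pixels
 *	offset      *= 4                 = 65568 bytes
 *	base        += 65568, then base &= ~7 to keep 8-byte alignment
 */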
624
static int
radeon_crtc_set_config(struct drm_mode_set *set)
{
	struct drm_device *dev;
	struct radeon_device *rdev;
	struct drm_crtc *crtc;
	bool active = false;
	int ret;

	if (!set || !set->crtc)
		return -EINVAL;

	dev = set->crtc->dev;

	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0)
		return ret;

	ret = drm_crtc_helper_set_config(set);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head)
		if (crtc->enabled)
			active = true;

	pm_runtime_mark_last_busy(dev->dev);

	rdev = dev->dev_private;
	/* if we have active crtcs and we don't have a power ref,
	 * take the current one
	 */
	if (active && !rdev->have_disp_power_ref) {
		rdev->have_disp_power_ref = true;
		return ret;
	}
	/* if we have no active crtcs, then drop the power ref
	 * we got before
	 */
	if (!active && rdev->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		rdev->have_disp_power_ref = false;
	}

	/* drop the power reference we got coming in here */
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}

static const struct drm_crtc_funcs radeon_crtc_funcs = {
	.cursor_set2 = radeon_crtc_cursor_set2,
	.cursor_move = radeon_crtc_cursor_move,
	.gamma_set = radeon_crtc_gamma_set,
	.set_config = radeon_crtc_set_config,
	.destroy = radeon_crtc_destroy,
	.page_flip_target = radeon_crtc_page_flip_target,
};

static void radeon_crtc_init(struct drm_device *dev, int index)
{
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc;
	int i;

	radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (radeon_crtc == NULL)
		return;

	drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
	radeon_crtc->crtc_id = index;
	radeon_crtc->flip_queue = alloc_workqueue("radeon-crtc", WQ_HIGHPRI, 0);
	rdev->mode_info.crtcs[index] = radeon_crtc;

	if (rdev->family >= CHIP_BONAIRE) {
		radeon_crtc->max_cursor_width = CIK_CURSOR_WIDTH;
		radeon_crtc->max_cursor_height = CIK_CURSOR_HEIGHT;
	} else {
		radeon_crtc->max_cursor_width = CURSOR_WIDTH;
		radeon_crtc->max_cursor_height = CURSOR_HEIGHT;
	}
	dev->mode_config.cursor_width = radeon_crtc->max_cursor_width;
	dev->mode_config.cursor_height = radeon_crtc->max_cursor_height;

#if 0
	radeon_crtc->mode_set.crtc = &radeon_crtc->base;
	radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
	radeon_crtc->mode_set.num_connectors = 0;
#endif

	/* start with a linear (identity) gamma ramp: 8 bit index -> 10 bit entry */
	for (i = 0; i < 256; i++) {
		radeon_crtc->lut_r[i] = i << 2;
		radeon_crtc->lut_g[i] = i << 2;
		radeon_crtc->lut_b[i] = i << 2;
	}

	if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
		radeon_atombios_init_crtc(dev, radeon_crtc);
	else
		radeon_legacy_init_crtc(dev, radeon_crtc);
}

724static const char *encoder_names[38] = {
725 "NONE",
726 "INTERNAL_LVDS",
727 "INTERNAL_TMDS1",
728 "INTERNAL_TMDS2",
729 "INTERNAL_DAC1",
730 "INTERNAL_DAC2",
731 "INTERNAL_SDVOA",
732 "INTERNAL_SDVOB",
733 "SI170B",
734 "CH7303",
735 "CH7301",
736 "INTERNAL_DVO1",
737 "EXTERNAL_SDVOA",
738 "EXTERNAL_SDVOB",
739 "TITFP513",
740 "INTERNAL_LVTM1",
741 "VT1623",
742 "HDMI_SI1930",
743 "HDMI_INTERNAL",
744 "INTERNAL_KLDSCP_TMDS1",
745 "INTERNAL_KLDSCP_DVO1",
746 "INTERNAL_KLDSCP_DAC1",
747 "INTERNAL_KLDSCP_DAC2",
748 "SI178",
749 "MVPU_FPGA",
750 "INTERNAL_DDI",
751 "VT1625",
752 "HDMI_SI1932",
753 "DP_AN9801",
754 "DP_DP501",
755 "INTERNAL_UNIPHY",
756 "INTERNAL_KLDSCP_LVTMA",
757 "INTERNAL_UNIPHY1",
758 "INTERNAL_UNIPHY2",
759 "NUTMEG",
760 "TRAVIS",
761 "INTERNAL_VCE",
762 "INTERNAL_UNIPHY3",
763};
764
765static const char *hpd_names[6] = {
766 "HPD1",
767 "HPD2",
768 "HPD3",
769 "HPD4",
770 "HPD5",
771 "HPD6",
772};
773
774static void radeon_print_display_setup(struct drm_device *dev)
775{
776 struct drm_connector *connector;
777 struct radeon_connector *radeon_connector;
778 struct drm_encoder *encoder;
779 struct radeon_encoder *radeon_encoder;
780 uint32_t devices;
781 int i = 0;
782
783 DRM_INFO("Radeon Display Connectors\n");
784 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
785 radeon_connector = to_radeon_connector(connector);
786 DRM_INFO("Connector %d:\n", i);
787 DRM_INFO(" %s\n", connector->name);
788 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
789 DRM_INFO(" %s\n", hpd_names[radeon_connector->hpd.hpd]);
790 if (radeon_connector->ddc_bus) {
791 DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
792 radeon_connector->ddc_bus->rec.mask_clk_reg,
793 radeon_connector->ddc_bus->rec.mask_data_reg,
794 radeon_connector->ddc_bus->rec.a_clk_reg,
795 radeon_connector->ddc_bus->rec.a_data_reg,
796 radeon_connector->ddc_bus->rec.en_clk_reg,
797 radeon_connector->ddc_bus->rec.en_data_reg,
798 radeon_connector->ddc_bus->rec.y_clk_reg,
799 radeon_connector->ddc_bus->rec.y_data_reg);
800 if (radeon_connector->router.ddc_valid)
801 DRM_INFO(" DDC Router 0x%x/0x%x\n",
802 radeon_connector->router.ddc_mux_control_pin,
803 radeon_connector->router.ddc_mux_state);
804 if (radeon_connector->router.cd_valid)
805 DRM_INFO(" Clock/Data Router 0x%x/0x%x\n",
806 radeon_connector->router.cd_mux_control_pin,
807 radeon_connector->router.cd_mux_state);
808 } else {
809 if (connector->connector_type == DRM_MODE_CONNECTOR_VGA ||
810 connector->connector_type == DRM_MODE_CONNECTOR_DVII ||
811 connector->connector_type == DRM_MODE_CONNECTOR_DVID ||
812 connector->connector_type == DRM_MODE_CONNECTOR_DVIA ||
813 connector->connector_type == DRM_MODE_CONNECTOR_HDMIA ||
814 connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)
815 DRM_INFO(" DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
816 }
817 DRM_INFO(" Encoders:\n");
818 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
819 radeon_encoder = to_radeon_encoder(encoder);
820 devices = radeon_encoder->devices & radeon_connector->devices;
821 if (devices) {
822 if (devices & ATOM_DEVICE_CRT1_SUPPORT)
823 DRM_INFO(" CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]);
824 if (devices & ATOM_DEVICE_CRT2_SUPPORT)
825 DRM_INFO(" CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]);
826 if (devices & ATOM_DEVICE_LCD1_SUPPORT)
827 DRM_INFO(" LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]);
828 if (devices & ATOM_DEVICE_DFP1_SUPPORT)
829 DRM_INFO(" DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]);
830 if (devices & ATOM_DEVICE_DFP2_SUPPORT)
831 DRM_INFO(" DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]);
832 if (devices & ATOM_DEVICE_DFP3_SUPPORT)
833 DRM_INFO(" DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]);
834 if (devices & ATOM_DEVICE_DFP4_SUPPORT)
835 DRM_INFO(" DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]);
836 if (devices & ATOM_DEVICE_DFP5_SUPPORT)
837 DRM_INFO(" DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]);
838 if (devices & ATOM_DEVICE_DFP6_SUPPORT)
839 DRM_INFO(" DFP6: %s\n", encoder_names[radeon_encoder->encoder_id]);
840 if (devices & ATOM_DEVICE_TV1_SUPPORT)
841 DRM_INFO(" TV1: %s\n", encoder_names[radeon_encoder->encoder_id]);
842 if (devices & ATOM_DEVICE_CV_SUPPORT)
843 DRM_INFO(" CV: %s\n", encoder_names[radeon_encoder->encoder_id]);
844 }
845 }
846 i++;
847 }
848}
849
850static bool radeon_setup_enc_conn(struct drm_device *dev)
851{
852 struct radeon_device *rdev = dev->dev_private;
853 bool ret = false;
854
855 if (rdev->bios) {
856 if (rdev->is_atom_bios) {
857 ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
858 if (ret == false)
859 ret = radeon_get_atom_connector_info_from_object_table(dev);
860 } else {
861 ret = radeon_get_legacy_connector_info_from_bios(dev);
862 if (ret == false)
863 ret = radeon_get_legacy_connector_info_from_table(dev);
864 }
865 } else {
866 if (!ASIC_IS_AVIVO(rdev))
867 ret = radeon_get_legacy_connector_info_from_table(dev);
868 }
869 if (ret) {
870 radeon_setup_encoder_clones(dev);
871 radeon_print_display_setup(dev);
872 }
873
874 return ret;
875}
876
/**
 * avivo_reduce_ratio - fractional number reduction
 *
 * @nom: nominator
 * @den: denominator
 * @nom_min: minimum value for nominator
 * @den_min: minimum value for denominator
 *
 * Find the greatest common divisor and apply it to both nominator and
 * denominator, but make sure nominator and denominator stay at least as
 * large as their minimums.
 */
static void avivo_reduce_ratio(unsigned *nom, unsigned *den,
			       unsigned nom_min, unsigned den_min)
{
	unsigned tmp;

	/* reduce the numbers to a simpler ratio */
	tmp = gcd(*nom, *den);
	*nom /= tmp;
	*den /= tmp;

	/* make sure nominator is large enough */
	if (*nom < nom_min) {
		tmp = DIV_ROUND_UP(nom_min, *nom);
		*nom *= tmp;
		*den *= tmp;
	}

	/* make sure the denominator is large enough */
	if (*den < den_min) {
		tmp = DIV_ROUND_UP(den_min, *den);
		*nom *= tmp;
		*den *= tmp;
	}
}

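/*
 * Worked example (not from the original source) for avivo_reduce_ratio():
 * with *nom = 30, *den = 20, nom_min = 4 and den_min = 4:
 *
 *	gcd(30, 20) = 10          ->  3 / 2
 *	3 < nom_min, scale by 2   ->  6 / 4   (same ratio, both limits met)
 */
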
/**
 * avivo_get_fb_ref_div - feedback and ref divider calculation
 *
 * @nom: nominator
 * @den: denominator
 * @post_div: post divider
 * @fb_div_max: feedback divider maximum
 * @ref_div_max: reference divider maximum
 * @fb_div: resulting feedback divider
 * @ref_div: resulting reference divider
 *
 * Calculate feedback and reference divider for a given post divider,
 * making sure we stay within the divider limits.
 */
static void avivo_get_fb_ref_div(unsigned nom, unsigned den, unsigned post_div,
				 unsigned fb_div_max, unsigned ref_div_max,
				 unsigned *fb_div, unsigned *ref_div)
{
	/* limit reference * post divider to a maximum */
	ref_div_max = max(min(100 / post_div, ref_div_max), 1u);

	/* get matching reference and feedback divider */
	*ref_div = min(max(DIV_ROUND_CLOSEST(den, post_div), 1u), ref_div_max);
	*fb_div = DIV_ROUND_CLOSEST(nom * *ref_div * post_div, den);

	/* limit fb divider to its maximum */
	if (*fb_div > fb_div_max) {
		*ref_div = DIV_ROUND_CLOSEST(*ref_div * fb_div_max, *fb_div);
		*fb_div = fb_div_max;
	}
}
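
/*
 * Worked example (not from the original source) for avivo_get_fb_ref_div():
 * with nom = 6, den = 4, post_div = 2, fb_div_max = 1023 and a caller
 * supplied ref_div_max of 30 (all values hypothetical):
 *
 *	ref_div_max = max(min(100 / 2, 30), 1)                  = 30
 *	*ref_div    = min(max(DIV_ROUND_CLOSEST(4, 2), 1), 30)  = 2
 *	*fb_div     = DIV_ROUND_CLOSEST(6 * 2 * 2, 4)           = 6
 *
 * which satisfies fb_div / (ref_div * post_div) = 6 / 4 = nom / den.
 */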
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961void radeon_compute_pll_avivo(struct radeon_pll *pll,
962 u32 freq,
963 u32 *dot_clock_p,
964 u32 *fb_div_p,
965 u32 *frac_fb_div_p,
966 u32 *ref_div_p,
967 u32 *post_div_p)
968{
969 unsigned target_clock = pll->flags & RADEON_PLL_USE_FRAC_FB_DIV ?
970 freq : freq / 10;
971
972 unsigned fb_div_min, fb_div_max, fb_div;
973 unsigned post_div_min, post_div_max, post_div;
974 unsigned ref_div_min, ref_div_max, ref_div;
975 unsigned post_div_best, diff_best;
976 unsigned nom, den;
977
978
979 fb_div_min = pll->min_feedback_div;
980 fb_div_max = pll->max_feedback_div;
981
982 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
983 fb_div_min *= 10;
984 fb_div_max *= 10;
985 }
986
987
988 if (pll->flags & RADEON_PLL_USE_REF_DIV)
989 ref_div_min = pll->reference_div;
990 else
991 ref_div_min = pll->min_ref_div;
992
993 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV &&
994 pll->flags & RADEON_PLL_USE_REF_DIV)
995 ref_div_max = pll->reference_div;
996 else if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP)
997
998 ref_div_max = min(pll->max_ref_div, 7u);
999 else
1000 ref_div_max = pll->max_ref_div;
1001
1002
1003 if (pll->flags & RADEON_PLL_USE_POST_DIV) {
1004 post_div_min = pll->post_div;
1005 post_div_max = pll->post_div;
1006 } else {
1007 unsigned vco_min, vco_max;
1008
1009 if (pll->flags & RADEON_PLL_IS_LCD) {
1010 vco_min = pll->lcd_pll_out_min;
1011 vco_max = pll->lcd_pll_out_max;
1012 } else {
1013 vco_min = pll->pll_out_min;
1014 vco_max = pll->pll_out_max;
1015 }
1016
1017 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
1018 vco_min *= 10;
1019 vco_max *= 10;
1020 }
1021
1022 post_div_min = vco_min / target_clock;
1023 if ((target_clock * post_div_min) < vco_min)
1024 ++post_div_min;
1025 if (post_div_min < pll->min_post_div)
1026 post_div_min = pll->min_post_div;
1027
1028 post_div_max = vco_max / target_clock;
1029 if ((target_clock * post_div_max) > vco_max)
1030 --post_div_max;
1031 if (post_div_max > pll->max_post_div)
1032 post_div_max = pll->max_post_div;
1033 }
1034
1035
1036 nom = target_clock;
1037 den = pll->reference_freq;
1038
1039
1040 avivo_reduce_ratio(&nom, &den, fb_div_min, post_div_min);
1041
1042
1043 if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP)
1044 post_div_best = post_div_min;
1045 else
1046 post_div_best = post_div_max;
1047 diff_best = ~0;
1048
1049 for (post_div = post_div_min; post_div <= post_div_max; ++post_div) {
1050 unsigned diff;
1051 avivo_get_fb_ref_div(nom, den, post_div, fb_div_max,
1052 ref_div_max, &fb_div, &ref_div);
1053 diff = abs(target_clock - (pll->reference_freq * fb_div) /
1054 (ref_div * post_div));
1055
1056 if (diff < diff_best || (diff == diff_best &&
1057 !(pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP))) {
1058
1059 post_div_best = post_div;
1060 diff_best = diff;
1061 }
1062 }
1063 post_div = post_div_best;
1064
1065
1066 avivo_get_fb_ref_div(nom, den, post_div, fb_div_max, ref_div_max,
1067 &fb_div, &ref_div);
1068
1069
1070
1071 avivo_reduce_ratio(&fb_div, &ref_div, fb_div_min, ref_div_min);
1072
1073
1074 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV && (fb_div % 10)) {
1075 fb_div_min = max(fb_div_min, (9 - (fb_div % 10)) * 20 + 50);
1076 if (fb_div < fb_div_min) {
1077 unsigned tmp = DIV_ROUND_UP(fb_div_min, fb_div);
1078 fb_div *= tmp;
1079 ref_div *= tmp;
1080 }
1081 }
1082
1083
1084 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
1085 *fb_div_p = fb_div / 10;
1086 *frac_fb_div_p = fb_div % 10;
1087 } else {
1088 *fb_div_p = fb_div;
1089 *frac_fb_div_p = 0;
1090 }
1091
1092 *dot_clock_p = ((pll->reference_freq * *fb_div_p * 10) +
1093 (pll->reference_freq * *frac_fb_div_p)) /
1094 (ref_div * post_div * 10);
1095 *ref_div_p = ref_div;
1096 *post_div_p = post_div;
1097
1098 DRM_DEBUG_KMS("%d - %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1099 freq, *dot_clock_p * 10, *fb_div_p, *frac_fb_div_p,
1100 ref_div, post_div);
1101}
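/*
 * Illustrative note (not from the original source): the dividers chosen
 * above realize
 *
 *	output = reference_freq * fb_div / (ref_div * post_div)
 *
 * with fb_div carrying one extra decimal digit when fractional feedback
 * dividers are in use.  E.g. a 27 MHz reference with fb_div = 60,
 * ref_div = 1 and post_div = 10 gives a 1620 MHz VCO and a 162 MHz output.
 */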
1102
/* 64-bit division with rounding to the nearest integer */
static inline uint32_t radeon_div(uint64_t n, uint32_t d)
{
	uint64_t mod;

	n += d / 2;

	mod = do_div(n, d);
	return n;
}
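
/*
 * Illustrative note (not from the original source): radeon_div() rounds to
 * the nearest integer rather than truncating, e.g. radeon_div(7, 2) = 4
 * (7 + 1 = 8, then 8 / 2 = 4) while plain integer division would give 3.
 */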
1113
1114void radeon_compute_pll_legacy(struct radeon_pll *pll,
1115 uint64_t freq,
1116 uint32_t *dot_clock_p,
1117 uint32_t *fb_div_p,
1118 uint32_t *frac_fb_div_p,
1119 uint32_t *ref_div_p,
1120 uint32_t *post_div_p)
1121{
1122 uint32_t min_ref_div = pll->min_ref_div;
1123 uint32_t max_ref_div = pll->max_ref_div;
1124 uint32_t min_post_div = pll->min_post_div;
1125 uint32_t max_post_div = pll->max_post_div;
1126 uint32_t min_fractional_feed_div = 0;
1127 uint32_t max_fractional_feed_div = 0;
1128 uint32_t best_vco = pll->best_vco;
1129 uint32_t best_post_div = 1;
1130 uint32_t best_ref_div = 1;
1131 uint32_t best_feedback_div = 1;
1132 uint32_t best_frac_feedback_div = 0;
1133 uint32_t best_freq = -1;
1134 uint32_t best_error = 0xffffffff;
1135 uint32_t best_vco_diff = 1;
1136 uint32_t post_div;
1137 u32 pll_out_min, pll_out_max;
1138
1139 DRM_DEBUG_KMS("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
1140 freq = freq * 1000;
1141
1142 if (pll->flags & RADEON_PLL_IS_LCD) {
1143 pll_out_min = pll->lcd_pll_out_min;
1144 pll_out_max = pll->lcd_pll_out_max;
1145 } else {
1146 pll_out_min = pll->pll_out_min;
1147 pll_out_max = pll->pll_out_max;
1148 }
1149
1150 if (pll_out_min > 64800)
1151 pll_out_min = 64800;
1152
1153 if (pll->flags & RADEON_PLL_USE_REF_DIV)
1154 min_ref_div = max_ref_div = pll->reference_div;
1155 else {
1156 while (min_ref_div < max_ref_div-1) {
1157 uint32_t mid = (min_ref_div + max_ref_div) / 2;
1158 uint32_t pll_in = pll->reference_freq / mid;
1159 if (pll_in < pll->pll_in_min)
1160 max_ref_div = mid;
1161 else if (pll_in > pll->pll_in_max)
1162 min_ref_div = mid;
1163 else
1164 break;
1165 }
1166 }
1167
1168 if (pll->flags & RADEON_PLL_USE_POST_DIV)
1169 min_post_div = max_post_div = pll->post_div;
1170
1171 if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
1172 min_fractional_feed_div = pll->min_frac_feedback_div;
1173 max_fractional_feed_div = pll->max_frac_feedback_div;
1174 }
1175
1176 for (post_div = max_post_div; post_div >= min_post_div; --post_div) {
1177 uint32_t ref_div;
1178
1179 if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
1180 continue;
1181
1182
1183 if (pll->flags & RADEON_PLL_LEGACY) {
1184 if ((post_div == 5) ||
1185 (post_div == 7) ||
1186 (post_div == 9) ||
1187 (post_div == 10) ||
1188 (post_div == 11) ||
1189 (post_div == 13) ||
1190 (post_div == 14) ||
1191 (post_div == 15))
1192 continue;
1193 }
1194
1195 for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) {
1196 uint32_t feedback_div, current_freq = 0, error, vco_diff;
1197 uint32_t pll_in = pll->reference_freq / ref_div;
1198 uint32_t min_feed_div = pll->min_feedback_div;
1199 uint32_t max_feed_div = pll->max_feedback_div + 1;
1200
1201 if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max)
1202 continue;
1203
1204 while (min_feed_div < max_feed_div) {
1205 uint32_t vco;
1206 uint32_t min_frac_feed_div = min_fractional_feed_div;
1207 uint32_t max_frac_feed_div = max_fractional_feed_div + 1;
1208 uint32_t frac_feedback_div;
1209 uint64_t tmp;
1210
1211 feedback_div = (min_feed_div + max_feed_div) / 2;
1212
1213 tmp = (uint64_t)pll->reference_freq * feedback_div;
1214 vco = radeon_div(tmp, ref_div);
1215
1216 if (vco < pll_out_min) {
1217 min_feed_div = feedback_div + 1;
1218 continue;
1219 } else if (vco > pll_out_max) {
1220 max_feed_div = feedback_div;
1221 continue;
1222 }
1223
1224 while (min_frac_feed_div < max_frac_feed_div) {
1225 frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2;
1226 tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div;
1227 tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
1228 current_freq = radeon_div(tmp, ref_div * post_div);
1229
1230 if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
1231 if (freq < current_freq)
1232 error = 0xffffffff;
1233 else
1234 error = freq - current_freq;
1235 } else
1236 error = abs(current_freq - freq);
1237 vco_diff = abs(vco - best_vco);
1238
1239 if ((best_vco == 0 && error < best_error) ||
1240 (best_vco != 0 &&
1241 ((best_error > 100 && error < best_error - 100) ||
1242 (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) {
1243 best_post_div = post_div;
1244 best_ref_div = ref_div;
1245 best_feedback_div = feedback_div;
1246 best_frac_feedback_div = frac_feedback_div;
1247 best_freq = current_freq;
1248 best_error = error;
1249 best_vco_diff = vco_diff;
1250 } else if (current_freq == freq) {
1251 if (best_freq == -1) {
1252 best_post_div = post_div;
1253 best_ref_div = ref_div;
1254 best_feedback_div = feedback_div;
1255 best_frac_feedback_div = frac_feedback_div;
1256 best_freq = current_freq;
1257 best_error = error;
1258 best_vco_diff = vco_diff;
1259 } else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
1260 ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
1261 ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
1262 ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
1263 ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
1264 ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
1265 best_post_div = post_div;
1266 best_ref_div = ref_div;
1267 best_feedback_div = feedback_div;
1268 best_frac_feedback_div = frac_feedback_div;
1269 best_freq = current_freq;
1270 best_error = error;
1271 best_vco_diff = vco_diff;
1272 }
1273 }
1274 if (current_freq < freq)
1275 min_frac_feed_div = frac_feedback_div + 1;
1276 else
1277 max_frac_feed_div = frac_feedback_div;
1278 }
1279 if (current_freq < freq)
1280 min_feed_div = feedback_div + 1;
1281 else
1282 max_feed_div = feedback_div;
1283 }
1284 }
1285 }
1286
1287 *dot_clock_p = best_freq / 10000;
1288 *fb_div_p = best_feedback_div;
1289 *frac_fb_div_p = best_frac_feedback_div;
1290 *ref_div_p = best_ref_div;
1291 *post_div_p = best_post_div;
1292 DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1293 (long long)freq,
1294 best_freq / 1000, best_feedback_div, best_frac_feedback_div,
1295 best_ref_div, best_post_div);
1296
1297}
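
/*
 * Illustrative note (not from the original source): the dividers returned
 * above satisfy, in the same units as pll->reference_freq,
 *
 *	dot_clock = reference_freq * (fb_div + frac_fb_div / 10) /
 *		    (ref_div * post_div)
 *
 * e.g. a 27 MHz reference with fb_div = 34, frac_fb_div = 5, ref_div = 3
 * and post_div = 3 gives 27 * 34.5 / 9 = 103.5 MHz.
 */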
1298
1299static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb)
1300{
1301 struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
1302
1303 drm_gem_object_unreference_unlocked(radeon_fb->obj);
1304 drm_framebuffer_cleanup(fb);
1305 kfree(radeon_fb);
1306}
1307
1308static int radeon_user_framebuffer_create_handle(struct drm_framebuffer *fb,
1309 struct drm_file *file_priv,
1310 unsigned int *handle)
1311{
1312 struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
1313
1314 return drm_gem_handle_create(file_priv, radeon_fb->obj, handle);
1315}
1316
1317static const struct drm_framebuffer_funcs radeon_fb_funcs = {
1318 .destroy = radeon_user_framebuffer_destroy,
1319 .create_handle = radeon_user_framebuffer_create_handle,
1320};
1321
1322int
1323radeon_framebuffer_init(struct drm_device *dev,
1324 struct radeon_framebuffer *rfb,
1325 const struct drm_mode_fb_cmd2 *mode_cmd,
1326 struct drm_gem_object *obj)
1327{
1328 int ret;
1329 rfb->obj = obj;
1330 drm_helper_mode_fill_fb_struct(&rfb->base, mode_cmd);
1331 ret = drm_framebuffer_init(dev, &rfb->base, &radeon_fb_funcs);
1332 if (ret) {
1333 rfb->obj = NULL;
1334 return ret;
1335 }
1336 return 0;
1337}
1338
1339static struct drm_framebuffer *
1340radeon_user_framebuffer_create(struct drm_device *dev,
1341 struct drm_file *file_priv,
1342 const struct drm_mode_fb_cmd2 *mode_cmd)
1343{
1344 struct drm_gem_object *obj;
1345 struct radeon_framebuffer *radeon_fb;
1346 int ret;
1347
1348 obj = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
1349 if (obj == NULL) {
1350 dev_err(&dev->pdev->dev, "No GEM object associated to handle 0x%08X, "
1351 "can't create framebuffer\n", mode_cmd->handles[0]);
1352 return ERR_PTR(-ENOENT);
1353 }
1354
1355 radeon_fb = kzalloc(sizeof(*radeon_fb), GFP_KERNEL);
1356 if (radeon_fb == NULL) {
1357 drm_gem_object_unreference_unlocked(obj);
1358 return ERR_PTR(-ENOMEM);
1359 }
1360
1361 ret = radeon_framebuffer_init(dev, radeon_fb, mode_cmd, obj);
1362 if (ret) {
1363 kfree(radeon_fb);
1364 drm_gem_object_unreference_unlocked(obj);
1365 return ERR_PTR(ret);
1366 }
1367
1368 return &radeon_fb->base;
1369}
1370
1371static void radeon_output_poll_changed(struct drm_device *dev)
1372{
1373 struct radeon_device *rdev = dev->dev_private;
1374 radeon_fb_output_poll_changed(rdev);
1375}
1376
1377static const struct drm_mode_config_funcs radeon_mode_funcs = {
1378 .fb_create = radeon_user_framebuffer_create,
1379 .output_poll_changed = radeon_output_poll_changed
1380};
1381
1382static struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
1383{ { 0, "driver" },
1384 { 1, "bios" },
1385};
1386
1387static struct drm_prop_enum_list radeon_tv_std_enum_list[] =
1388{ { TV_STD_NTSC, "ntsc" },
1389 { TV_STD_PAL, "pal" },
1390 { TV_STD_PAL_M, "pal-m" },
1391 { TV_STD_PAL_60, "pal-60" },
1392 { TV_STD_NTSC_J, "ntsc-j" },
1393 { TV_STD_SCART_PAL, "scart-pal" },
1394 { TV_STD_PAL_CN, "pal-cn" },
1395 { TV_STD_SECAM, "secam" },
1396};
1397
1398static struct drm_prop_enum_list radeon_underscan_enum_list[] =
1399{ { UNDERSCAN_OFF, "off" },
1400 { UNDERSCAN_ON, "on" },
1401 { UNDERSCAN_AUTO, "auto" },
1402};
1403
1404static struct drm_prop_enum_list radeon_audio_enum_list[] =
1405{ { RADEON_AUDIO_DISABLE, "off" },
1406 { RADEON_AUDIO_ENABLE, "on" },
1407 { RADEON_AUDIO_AUTO, "auto" },
1408};
1409
1410
1411static struct drm_prop_enum_list radeon_dither_enum_list[] =
1412{ { RADEON_FMT_DITHER_DISABLE, "off" },
1413 { RADEON_FMT_DITHER_ENABLE, "on" },
1414};
1415
1416static struct drm_prop_enum_list radeon_output_csc_enum_list[] =
1417{ { RADEON_OUTPUT_CSC_BYPASS, "bypass" },
1418 { RADEON_OUTPUT_CSC_TVRGB, "tvrgb" },
1419 { RADEON_OUTPUT_CSC_YCBCR601, "ycbcr601" },
1420 { RADEON_OUTPUT_CSC_YCBCR709, "ycbcr709" },
1421};
1422
1423static int radeon_modeset_create_props(struct radeon_device *rdev)
1424{
1425 int sz;
1426
1427 if (rdev->is_atom_bios) {
1428 rdev->mode_info.coherent_mode_property =
1429 drm_property_create_range(rdev->ddev, 0 , "coherent", 0, 1);
1430 if (!rdev->mode_info.coherent_mode_property)
1431 return -ENOMEM;
1432 }
1433
1434 if (!ASIC_IS_AVIVO(rdev)) {
1435 sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
1436 rdev->mode_info.tmds_pll_property =
1437 drm_property_create_enum(rdev->ddev, 0,
1438 "tmds_pll",
1439 radeon_tmds_pll_enum_list, sz);
1440 }
1441
1442 rdev->mode_info.load_detect_property =
1443 drm_property_create_range(rdev->ddev, 0, "load detection", 0, 1);
1444 if (!rdev->mode_info.load_detect_property)
1445 return -ENOMEM;
1446
1447 drm_mode_create_scaling_mode_property(rdev->ddev);
1448
1449 sz = ARRAY_SIZE(radeon_tv_std_enum_list);
1450 rdev->mode_info.tv_std_property =
1451 drm_property_create_enum(rdev->ddev, 0,
1452 "tv standard",
1453 radeon_tv_std_enum_list, sz);
1454
1455 sz = ARRAY_SIZE(radeon_underscan_enum_list);
1456 rdev->mode_info.underscan_property =
1457 drm_property_create_enum(rdev->ddev, 0,
1458 "underscan",
1459 radeon_underscan_enum_list, sz);
1460
1461 rdev->mode_info.underscan_hborder_property =
1462 drm_property_create_range(rdev->ddev, 0,
1463 "underscan hborder", 0, 128);
1464 if (!rdev->mode_info.underscan_hborder_property)
1465 return -ENOMEM;
1466
1467 rdev->mode_info.underscan_vborder_property =
1468 drm_property_create_range(rdev->ddev, 0,
1469 "underscan vborder", 0, 128);
1470 if (!rdev->mode_info.underscan_vborder_property)
1471 return -ENOMEM;
1472
1473 sz = ARRAY_SIZE(radeon_audio_enum_list);
1474 rdev->mode_info.audio_property =
1475 drm_property_create_enum(rdev->ddev, 0,
1476 "audio",
1477 radeon_audio_enum_list, sz);
1478
1479 sz = ARRAY_SIZE(radeon_dither_enum_list);
1480 rdev->mode_info.dither_property =
1481 drm_property_create_enum(rdev->ddev, 0,
1482 "dither",
1483 radeon_dither_enum_list, sz);
1484
1485 sz = ARRAY_SIZE(radeon_output_csc_enum_list);
1486 rdev->mode_info.output_csc_property =
1487 drm_property_create_enum(rdev->ddev, 0,
1488 "output_csc",
1489 radeon_output_csc_enum_list, sz);
1490
1491 return 0;
1492}
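
/*
 * Usage sketch (illustrative only, not part of this file): the properties
 * created above are attached to individual connectors elsewhere in the
 * driver, along the lines of the following; the connector pointer and the
 * initial values here are hypothetical.
 */
#if 0
	drm_object_attach_property(&connector->base,
				   rdev->mode_info.underscan_property,
				   UNDERSCAN_OFF);
	drm_object_attach_property(&connector->base,
				   rdev->mode_info.underscan_hborder_property, 0);
	drm_object_attach_property(&connector->base,
				   rdev->mode_info.underscan_vborder_property, 0);
#endif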
1493
void radeon_update_display_priority(struct radeon_device *rdev)
{
	/* adjustment options for the display watermarks */
	if ((radeon_disp_priority == 0) || (radeon_disp_priority > 2)) {
		/* set display priority to high for r3xx and rv515 discrete
		 * chips to avoid flickering caused by display controller
		 * underflow while the 3D engine is busy; leave the default
		 * for everything else
		 */
		if ((ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515)) &&
		    !(rdev->flags & RADEON_IS_IGP))
			rdev->disp_priority = 2;
		else
			rdev->disp_priority = 0;
	} else
		rdev->disp_priority = radeon_disp_priority;

}
1513
1514
1515
1516
1517static void radeon_afmt_init(struct radeon_device *rdev)
1518{
1519 int i;
1520
1521 for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++)
1522 rdev->mode_info.afmt[i] = NULL;
1523
1524 if (ASIC_IS_NODCE(rdev)) {
1525
1526 } else if (ASIC_IS_DCE4(rdev)) {
1527 static uint32_t eg_offsets[] = {
1528 EVERGREEN_CRTC0_REGISTER_OFFSET,
1529 EVERGREEN_CRTC1_REGISTER_OFFSET,
1530 EVERGREEN_CRTC2_REGISTER_OFFSET,
1531 EVERGREEN_CRTC3_REGISTER_OFFSET,
1532 EVERGREEN_CRTC4_REGISTER_OFFSET,
1533 EVERGREEN_CRTC5_REGISTER_OFFSET,
1534 0x13830 - 0x7030,
1535 };
1536 int num_afmt;
1537
1538
1539
1540
1541
1542 if (ASIC_IS_DCE8(rdev))
1543 num_afmt = 7;
1544 else if (ASIC_IS_DCE6(rdev))
1545 num_afmt = 6;
1546 else if (ASIC_IS_DCE5(rdev))
1547 num_afmt = 6;
1548 else if (ASIC_IS_DCE41(rdev))
1549 num_afmt = 2;
1550 else
1551 num_afmt = 6;
1552
1553 BUG_ON(num_afmt > ARRAY_SIZE(eg_offsets));
1554 for (i = 0; i < num_afmt; i++) {
1555 rdev->mode_info.afmt[i] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1556 if (rdev->mode_info.afmt[i]) {
1557 rdev->mode_info.afmt[i]->offset = eg_offsets[i];
1558 rdev->mode_info.afmt[i]->id = i;
1559 }
1560 }
1561 } else if (ASIC_IS_DCE3(rdev)) {
1562
1563 rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1564 if (rdev->mode_info.afmt[0]) {
1565 rdev->mode_info.afmt[0]->offset = DCE3_HDMI_OFFSET0;
1566 rdev->mode_info.afmt[0]->id = 0;
1567 }
1568 rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1569 if (rdev->mode_info.afmt[1]) {
1570 rdev->mode_info.afmt[1]->offset = DCE3_HDMI_OFFSET1;
1571 rdev->mode_info.afmt[1]->id = 1;
1572 }
1573 } else if (ASIC_IS_DCE2(rdev)) {
1574
1575 rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1576 if (rdev->mode_info.afmt[0]) {
1577 rdev->mode_info.afmt[0]->offset = DCE2_HDMI_OFFSET0;
1578 rdev->mode_info.afmt[0]->id = 0;
1579 }
1580
1581 if (rdev->family >= CHIP_R600) {
1582 rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1583 if (rdev->mode_info.afmt[1]) {
1584 rdev->mode_info.afmt[1]->offset = DCE2_HDMI_OFFSET1;
1585 rdev->mode_info.afmt[1]->id = 1;
1586 }
1587 }
1588 }
1589}
1590
1591static void radeon_afmt_fini(struct radeon_device *rdev)
1592{
1593 int i;
1594
1595 for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++) {
1596 kfree(rdev->mode_info.afmt[i]);
1597 rdev->mode_info.afmt[i] = NULL;
1598 }
1599}
1600
1601int radeon_modeset_init(struct radeon_device *rdev)
1602{
1603 int i;
1604 int ret;
1605
1606 drm_mode_config_init(rdev->ddev);
1607 rdev->mode_info.mode_config_initialized = true;
1608
1609 rdev->ddev->mode_config.funcs = &radeon_mode_funcs;
1610
1611 if (radeon_use_pflipirq == 2 && rdev->family >= CHIP_R600)
1612 rdev->ddev->mode_config.async_page_flip = true;
1613
1614 if (ASIC_IS_DCE5(rdev)) {
1615 rdev->ddev->mode_config.max_width = 16384;
1616 rdev->ddev->mode_config.max_height = 16384;
1617 } else if (ASIC_IS_AVIVO(rdev)) {
1618 rdev->ddev->mode_config.max_width = 8192;
1619 rdev->ddev->mode_config.max_height = 8192;
1620 } else {
1621 rdev->ddev->mode_config.max_width = 4096;
1622 rdev->ddev->mode_config.max_height = 4096;
1623 }
1624
1625 rdev->ddev->mode_config.preferred_depth = 24;
1626 rdev->ddev->mode_config.prefer_shadow = 1;
1627
1628 rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;
1629
1630 ret = radeon_modeset_create_props(rdev);
1631 if (ret) {
1632 return ret;
1633 }
1634
1635
1636 radeon_i2c_init(rdev);
1637
1638
1639 if (!rdev->is_atom_bios) {
1640
1641 radeon_combios_check_hardcoded_edid(rdev);
1642 }
1643
1644
1645 for (i = 0; i < rdev->num_crtc; i++) {
1646 radeon_crtc_init(rdev->ddev, i);
1647 }
1648
1649
1650 ret = radeon_setup_enc_conn(rdev->ddev);
1651 if (!ret) {
1652 return ret;
1653 }
1654
1655
1656 if (rdev->is_atom_bios) {
1657 radeon_atom_encoder_init(rdev);
1658 radeon_atom_disp_eng_pll_init(rdev);
1659 }
1660
1661
1662 radeon_hpd_init(rdev);
1663
1664
1665 radeon_afmt_init(rdev);
1666
1667 radeon_fbdev_init(rdev);
1668 drm_kms_helper_poll_init(rdev->ddev);
1669
1670
1671 ret = radeon_pm_late_init(rdev);
1672
1673 return 0;
1674}
1675
1676void radeon_modeset_fini(struct radeon_device *rdev)
1677{
1678 if (rdev->mode_info.mode_config_initialized) {
1679 drm_kms_helper_poll_fini(rdev->ddev);
1680 radeon_hpd_fini(rdev);
1681 drm_crtc_force_disable_all(rdev->ddev);
1682 radeon_fbdev_fini(rdev);
1683 radeon_afmt_fini(rdev);
1684 drm_mode_config_cleanup(rdev->ddev);
1685 rdev->mode_info.mode_config_initialized = false;
1686 }
1687
1688 kfree(rdev->mode_info.bios_hardcoded_edid);
1689
1690
1691 radeon_i2c_fini(rdev);
1692}
1693
static bool is_hdtv_mode(const struct drm_display_mode *mode)
{
	/* try and guess based off the mode - works for hdtv modes mostly */
	if ((mode->vdisplay == 480 && mode->hdisplay == 720) ||
	    (mode->vdisplay == 576) ||
	    (mode->vdisplay == 720) ||
	    (mode->vdisplay == 1080))
		return true;
	else
		return false;
}
1705
1706bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
1707 const struct drm_display_mode *mode,
1708 struct drm_display_mode *adjusted_mode)
1709{
1710 struct drm_device *dev = crtc->dev;
1711 struct radeon_device *rdev = dev->dev_private;
1712 struct drm_encoder *encoder;
1713 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1714 struct radeon_encoder *radeon_encoder;
1715 struct drm_connector *connector;
1716 struct radeon_connector *radeon_connector;
1717 bool first = true;
1718 u32 src_v = 1, dst_v = 1;
1719 u32 src_h = 1, dst_h = 1;
1720
1721 radeon_crtc->h_border = 0;
1722 radeon_crtc->v_border = 0;
1723
1724 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1725 if (encoder->crtc != crtc)
1726 continue;
1727 radeon_encoder = to_radeon_encoder(encoder);
1728 connector = radeon_get_connector_for_encoder(encoder);
1729 radeon_connector = to_radeon_connector(connector);
1730
1731 if (first) {
1732
1733 if (radeon_encoder->rmx_type == RMX_OFF)
1734 radeon_crtc->rmx_type = RMX_OFF;
1735 else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
1736 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
1737 radeon_crtc->rmx_type = radeon_encoder->rmx_type;
1738 else
1739 radeon_crtc->rmx_type = RMX_OFF;
1740
1741 memcpy(&radeon_crtc->native_mode,
1742 &radeon_encoder->native_mode,
1743 sizeof(struct drm_display_mode));
1744 src_v = crtc->mode.vdisplay;
1745 dst_v = radeon_crtc->native_mode.vdisplay;
1746 src_h = crtc->mode.hdisplay;
1747 dst_h = radeon_crtc->native_mode.hdisplay;
1748
1749
1750 if (ASIC_IS_AVIVO(rdev) &&
1751 (!(mode->flags & DRM_MODE_FLAG_INTERLACE)) &&
1752 ((radeon_encoder->underscan_type == UNDERSCAN_ON) ||
1753 ((radeon_encoder->underscan_type == UNDERSCAN_AUTO) &&
1754 drm_detect_hdmi_monitor(radeon_connector_edid(connector)) &&
1755 is_hdtv_mode(mode)))) {
1756 if (radeon_encoder->underscan_hborder != 0)
1757 radeon_crtc->h_border = radeon_encoder->underscan_hborder;
1758 else
1759 radeon_crtc->h_border = (mode->hdisplay >> 5) + 16;
1760 if (radeon_encoder->underscan_vborder != 0)
1761 radeon_crtc->v_border = radeon_encoder->underscan_vborder;
1762 else
1763 radeon_crtc->v_border = (mode->vdisplay >> 5) + 16;
1764 radeon_crtc->rmx_type = RMX_FULL;
1765 src_v = crtc->mode.vdisplay;
1766 dst_v = crtc->mode.vdisplay - (radeon_crtc->v_border * 2);
1767 src_h = crtc->mode.hdisplay;
1768 dst_h = crtc->mode.hdisplay - (radeon_crtc->h_border * 2);
1769 }
1770 first = false;
1771 } else {
1772 if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
1773
1774
1775
1776
1777
1778
1779 DRM_ERROR("Scaling not consistent across encoder.\n");
1780 return false;
1781 }
1782 }
1783 }
1784 if (radeon_crtc->rmx_type != RMX_OFF) {
1785 fixed20_12 a, b;
1786 a.full = dfixed_const(src_v);
1787 b.full = dfixed_const(dst_v);
1788 radeon_crtc->vsc.full = dfixed_div(a, b);
1789 a.full = dfixed_const(src_h);
1790 b.full = dfixed_const(dst_h);
1791 radeon_crtc->hsc.full = dfixed_div(a, b);
1792 } else {
1793 radeon_crtc->vsc.full = dfixed_const(1);
1794 radeon_crtc->hsc.full = dfixed_const(1);
1795 }
1796 return true;
1797}
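
/*
 * Illustrative note (not from the original source): vsc/hsc above are
 * source/destination ratios stored in 20.12 fixed point (fixed20_12).
 * For a hypothetical 1920 pixel wide mode scaled onto a 1280 pixel wide
 * panel, src_h = 1920 and dst_h = 1280, so hsc works out to 1.5, i.e.
 * hsc.full == 1.5 * 4096 == 0x1800.
 */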
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
1822
1823
1824
1825
1826
1827
1828
1829
1830
1831
1832
1833
1834
1835
1836int radeon_get_crtc_scanoutpos(struct drm_device *dev, unsigned int pipe,
1837 unsigned int flags, int *vpos, int *hpos,
1838 ktime_t *stime, ktime_t *etime,
1839 const struct drm_display_mode *mode)
1840{
1841 u32 stat_crtc = 0, vbl = 0, position = 0;
1842 int vbl_start, vbl_end, vtotal, ret = 0;
1843 bool in_vbl = true;
1844
1845 struct radeon_device *rdev = dev->dev_private;
1846
1847
1848
1849
1850 if (stime)
1851 *stime = ktime_get();
1852
1853 if (ASIC_IS_DCE4(rdev)) {
1854 if (pipe == 0) {
1855 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1856 EVERGREEN_CRTC0_REGISTER_OFFSET);
1857 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1858 EVERGREEN_CRTC0_REGISTER_OFFSET);
1859 ret |= DRM_SCANOUTPOS_VALID;
1860 }
1861 if (pipe == 1) {
1862 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1863 EVERGREEN_CRTC1_REGISTER_OFFSET);
1864 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1865 EVERGREEN_CRTC1_REGISTER_OFFSET);
1866 ret |= DRM_SCANOUTPOS_VALID;
1867 }
1868 if (pipe == 2) {
1869 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1870 EVERGREEN_CRTC2_REGISTER_OFFSET);
1871 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1872 EVERGREEN_CRTC2_REGISTER_OFFSET);
1873 ret |= DRM_SCANOUTPOS_VALID;
1874 }
1875 if (pipe == 3) {
1876 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1877 EVERGREEN_CRTC3_REGISTER_OFFSET);
1878 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1879 EVERGREEN_CRTC3_REGISTER_OFFSET);
1880 ret |= DRM_SCANOUTPOS_VALID;
1881 }
1882 if (pipe == 4) {
1883 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1884 EVERGREEN_CRTC4_REGISTER_OFFSET);
1885 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1886 EVERGREEN_CRTC4_REGISTER_OFFSET);
1887 ret |= DRM_SCANOUTPOS_VALID;
1888 }
1889 if (pipe == 5) {
1890 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1891 EVERGREEN_CRTC5_REGISTER_OFFSET);
1892 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1893 EVERGREEN_CRTC5_REGISTER_OFFSET);
1894 ret |= DRM_SCANOUTPOS_VALID;
1895 }
1896 } else if (ASIC_IS_AVIVO(rdev)) {
1897 if (pipe == 0) {
1898 vbl = RREG32(AVIVO_D1CRTC_V_BLANK_START_END);
1899 position = RREG32(AVIVO_D1CRTC_STATUS_POSITION);
1900 ret |= DRM_SCANOUTPOS_VALID;
1901 }
1902 if (pipe == 1) {
1903 vbl = RREG32(AVIVO_D2CRTC_V_BLANK_START_END);
1904 position = RREG32(AVIVO_D2CRTC_STATUS_POSITION);
1905 ret |= DRM_SCANOUTPOS_VALID;
1906 }
1907 } else {
1908
1909 if (pipe == 0) {
1910
1911
1912
1913 vbl = (RREG32(RADEON_CRTC_V_TOTAL_DISP) &
1914 RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
1915
1916 position = (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
1917 stat_crtc = RREG32(RADEON_CRTC_STATUS);
1918 if (!(stat_crtc & 1))
1919 in_vbl = false;
1920
1921 ret |= DRM_SCANOUTPOS_VALID;
1922 }
1923 if (pipe == 1) {
1924 vbl = (RREG32(RADEON_CRTC2_V_TOTAL_DISP) &
1925 RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
1926 position = (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
1927 stat_crtc = RREG32(RADEON_CRTC2_STATUS);
1928 if (!(stat_crtc & 1))
1929 in_vbl = false;
1930
1931 ret |= DRM_SCANOUTPOS_VALID;
1932 }
1933 }
1934
1935
1936 if (etime)
1937 *etime = ktime_get();
1938
1939
1940
1941
1942 *vpos = position & 0x1fff;
1943 *hpos = (position >> 16) & 0x1fff;
1944
1945
1946 if (vbl > 0) {
1947
1948 ret |= DRM_SCANOUTPOS_ACCURATE;
1949 vbl_start = vbl & 0x1fff;
1950 vbl_end = (vbl >> 16) & 0x1fff;
1951 }
1952 else {
1953
1954 vbl_start = mode->crtc_vdisplay;
1955 vbl_end = 0;
1956 }
1957
1958
1959 if (flags & GET_DISTANCE_TO_VBLANKSTART) {
1960
1961 *hpos = *vpos - vbl_start;
1962 }
1963
1964
1965
1966
1967
1968
1969
1970
1971
1972
1973
1974 if (!(flags & USE_REAL_VBLANKSTART))
1975 vbl_start -= rdev->mode_info.crtcs[pipe]->lb_vblank_lead_lines;
1976
1977
1978 if ((*vpos < vbl_start) && (*vpos >= vbl_end))
1979 in_vbl = false;
1980
1981
1982 if (in_vbl)
1983 ret |= DRM_SCANOUTPOS_IN_VBLANK;
1984
1985
1986 if (flags & GET_DISTANCE_TO_VBLANKSTART) {
1987
1988 *vpos -= vbl_start;
1989 return ret;
1990 }
1991
1992
1993
1994
1995
1996
1997
1998
1999 if (in_vbl && (*vpos >= vbl_start)) {
2000 vtotal = mode->crtc_vtotal;
2001 *vpos = *vpos - vtotal;
2002 }
2003
2004
2005 *vpos = *vpos - vbl_end;
2006
2007 return ret;
2008}
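
/*
 * Usage sketch (illustrative only, not part of this file): a caller that
 * just wants to know whether crtc 0 is currently inside vblank could do
 * something like the following, mirroring the checks already used in
 * radeon_flip_work_func() above.
 */
#if 0
	int vpos, hpos;
	int stat = radeon_get_crtc_scanoutpos(rdev->ddev, 0, 0,
					      &vpos, &hpos, NULL, NULL,
					      &rdev->mode_info.crtcs[0]->base.hwmode);

	if ((stat & (DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_IN_VBLANK)) ==
	    (DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_IN_VBLANK)) {
		/* inside vblank; vpos is negative and reaches 0 at the
		 * start of active scanout
		 */
	}
#endif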