#include <drm/drm_atomic_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_plane_helper.h>

#include "gt/intel_rps.h"

#include "intel_atomic_plane.h"
#include "intel_cdclk.h"
#include "intel_display_trace.h"
#include "intel_display_types.h"
#include "intel_fb.h"
#include "intel_fb_pin.h"
#include "intel_pm.h"
#include "intel_sprite.h"

static void intel_plane_state_reset(struct intel_plane_state *plane_state,
				    struct intel_plane *plane)
{
	memset(plane_state, 0, sizeof(*plane_state));

	__drm_atomic_helper_plane_state_reset(&plane_state->uapi, &plane->base);

	plane_state->scaler_id = -1;
}

struct intel_plane *intel_plane_alloc(void)
{
	struct intel_plane_state *plane_state;
	struct intel_plane *plane;

	plane = kzalloc(sizeof(*plane), GFP_KERNEL);
	if (!plane)
		return ERR_PTR(-ENOMEM);

	plane_state = kzalloc(sizeof(*plane_state), GFP_KERNEL);
	if (!plane_state) {
		kfree(plane);
		return ERR_PTR(-ENOMEM);
	}

	intel_plane_state_reset(plane_state, plane);

	plane->base.state = &plane_state->uapi;

	return plane;
}

void intel_plane_free(struct intel_plane *plane)
{
	intel_plane_destroy_state(&plane->base, plane->base.state);
	kfree(plane);
}

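/**
 * intel_plane_duplicate_state - duplicate plane state
 * @plane: drm plane
 *
 * Allocates and returns a copy of the plane state (both common and
 * Intel-specific) for the specified plane.
 *
 * Returns: The newly allocated plane state, or NULL on failure.
 */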
struct drm_plane_state *
intel_plane_duplicate_state(struct drm_plane *plane)
{
	struct intel_plane_state *intel_state;

	intel_state = to_intel_plane_state(plane->state);
	intel_state = kmemdup(intel_state, sizeof(*intel_state), GFP_KERNEL);

	if (!intel_state)
		return NULL;

	__drm_atomic_helper_plane_duplicate_state(plane, &intel_state->uapi);

	intel_state->ggtt_vma = NULL;
	intel_state->dpt_vma = NULL;
	intel_state->flags = 0;

	/* add reference to fb */
	if (intel_state->hw.fb)
		drm_framebuffer_get(intel_state->hw.fb);

	return &intel_state->uapi;
}

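/**
 * intel_plane_destroy_state - destroy plane state
 * @plane: drm plane
 * @state: state object to destroy
 *
 * Destroys the plane state (both common and Intel-specific) for the
 * specified plane.
 */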
void
intel_plane_destroy_state(struct drm_plane *plane,
			  struct drm_plane_state *state)
{
	struct intel_plane_state *plane_state = to_intel_plane_state(state);

	drm_WARN_ON(plane->dev, plane_state->ggtt_vma);
	drm_WARN_ON(plane->dev, plane_state->dpt_vma);

	__drm_atomic_helper_plane_destroy_state(&plane_state->uapi);
	if (plane_state->hw.fb)
		drm_framebuffer_put(plane_state->hw.fb);
	kfree(plane_state);
}

unsigned int intel_adjusted_rate(const struct drm_rect *src,
				 const struct drm_rect *dst,
				 unsigned int rate)
{
	unsigned int src_w, src_h, dst_w, dst_h;

	src_w = drm_rect_width(src) >> 16;
	src_h = drm_rect_height(src) >> 16;
	dst_w = drm_rect_width(dst);
	dst_h = drm_rect_height(dst);

	/* Downscaling limits the maximum pixel rate */
	dst_w = min(src_w, dst_w);
	dst_h = min(src_h, dst_h);

	return DIV_ROUND_UP_ULL(mul_u32_u32(rate, src_w * src_h),
				dst_w * dst_h);
}

unsigned int intel_plane_pixel_rate(const struct intel_crtc_state *crtc_state,
				    const struct intel_plane_state *plane_state)
{
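	/*
	 * Note we don't check for plane visibility here as
	 * we want to use this when calculating the cursor
	 * watermarks even if the cursor is fully offscreen.
	 * That depends on the src/dst rectangles being
	 * correctly populated whenever the watermark code
	 * is called with a partially populated plane state.
	 *
	 * cdclk/ddb use the normal plane visibility as
	 * the cursor doesn't contribute to the dbuf/cdclk
	 * with an offscreen cursor.
	 */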
	return intel_adjusted_rate(&plane_state->uapi.src,
				   &plane_state->uapi.dst,
				   crtc_state->pixel_rate);
}

unsigned int intel_plane_data_rate(const struct intel_crtc_state *crtc_state,
				   const struct intel_plane_state *plane_state)
{
	const struct drm_framebuffer *fb = plane_state->hw.fb;
	unsigned int cpp;
	unsigned int pixel_rate;

	if (!plane_state->uapi.visible)
		return 0;

	pixel_rate = intel_plane_pixel_rate(crtc_state, plane_state);

	cpp = fb->format->cpp[0];

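	/*
	 * Planar YUV formats (NV12, P010, etc.) are accounted with a
	 * larger effective cpp (NV12 cpp == 4, P010 cpp == 8) to cover
	 * the additional CbCr plane fetched alongside the Y plane.
	 */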
	if (fb->format->is_yuv && fb->format->num_planes > 1)
		cpp *= 4;

	return pixel_rate * cpp;
}

int intel_plane_calc_min_cdclk(struct intel_atomic_state *state,
			       struct intel_plane *plane,
			       bool *need_cdclk_calc)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	const struct intel_plane_state *plane_state =
		intel_atomic_get_new_plane_state(state, plane);
	struct intel_crtc *crtc = to_intel_crtc(plane_state->hw.crtc);
	const struct intel_cdclk_state *cdclk_state;
	const struct intel_crtc_state *old_crtc_state;
	struct intel_crtc_state *new_crtc_state;

	if (!plane_state->uapi.visible || !plane->min_cdclk)
		return 0;

	old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc);
	new_crtc_state = intel_atomic_get_new_crtc_state(state, crtc);

	new_crtc_state->min_cdclk[plane->id] =
		plane->min_cdclk(new_crtc_state, plane_state);

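	/*
	 * No need to check against the cdclk state if
	 * the min cdclk for the plane doesn't increase.
	 *
	 * Ie. we only ever increase the cdclk due to plane
	 * requirements. This can reduce back and forth
	 * display blinking due to constant cdclk changes.
	 */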
	if (new_crtc_state->min_cdclk[plane->id] <=
	    old_crtc_state->min_cdclk[plane->id])
		return 0;

	cdclk_state = intel_atomic_get_cdclk_state(state);
	if (IS_ERR(cdclk_state))
		return PTR_ERR(cdclk_state);

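	/*
	 * No need to recalculate the cdclk state if
	 * the min cdclk for the pipe doesn't increase.
	 *
	 * Ie. we only ever increase the cdclk due to plane
	 * requirements. This can reduce back and forth
	 * display blinking due to constant cdclk changes.
	 */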
	if (new_crtc_state->min_cdclk[plane->id] <=
	    cdclk_state->min_cdclk[crtc->pipe])
		return 0;

	drm_dbg_kms(&dev_priv->drm,
		    "[PLANE:%d:%s] min cdclk (%d kHz) > [CRTC:%d:%s] min cdclk (%d kHz)\n",
		    plane->base.base.id, plane->base.name,
		    new_crtc_state->min_cdclk[plane->id],
		    crtc->base.base.id, crtc->base.name,
		    cdclk_state->min_cdclk[crtc->pipe]);
	*need_cdclk_calc = true;

	return 0;
}

static void intel_plane_clear_hw_state(struct intel_plane_state *plane_state)
{
	if (plane_state->hw.fb)
		drm_framebuffer_put(plane_state->hw.fb);

	memset(&plane_state->hw, 0, sizeof(plane_state->hw));
}

void intel_plane_copy_uapi_to_hw_state(struct intel_plane_state *plane_state,
				       const struct intel_plane_state *from_plane_state,
				       struct intel_crtc *crtc)
{
	intel_plane_clear_hw_state(plane_state);

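	/*
	 * For the bigjoiner slave uapi.crtc will point at
	 * the master crtc. So we explicitly assign the right
	 * slave crtc to hw.crtc. uapi.crtc != NULL simply indicates
	 * the plane is logically enabled on the uapi level.
	 */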
	plane_state->hw.crtc = from_plane_state->uapi.crtc ? &crtc->base : NULL;

	plane_state->hw.fb = from_plane_state->uapi.fb;
	if (plane_state->hw.fb)
		drm_framebuffer_get(plane_state->hw.fb);

	plane_state->hw.alpha = from_plane_state->uapi.alpha;
	plane_state->hw.pixel_blend_mode =
		from_plane_state->uapi.pixel_blend_mode;
	plane_state->hw.rotation = from_plane_state->uapi.rotation;
	plane_state->hw.color_encoding = from_plane_state->uapi.color_encoding;
	plane_state->hw.color_range = from_plane_state->uapi.color_range;
	plane_state->hw.scaling_filter = from_plane_state->uapi.scaling_filter;

	plane_state->uapi.src = drm_plane_state_src(&from_plane_state->uapi);
	plane_state->uapi.dst = drm_plane_state_dest(&from_plane_state->uapi);
}

void intel_plane_copy_hw_state(struct intel_plane_state *plane_state,
			       const struct intel_plane_state *from_plane_state)
{
	intel_plane_clear_hw_state(plane_state);

	memcpy(&plane_state->hw, &from_plane_state->hw,
	       sizeof(plane_state->hw));

	if (plane_state->hw.fb)
		drm_framebuffer_get(plane_state->hw.fb);
}

void intel_plane_set_invisible(struct intel_crtc_state *crtc_state,
			       struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);

	crtc_state->active_planes &= ~BIT(plane->id);
	crtc_state->nv12_planes &= ~BIT(plane->id);
	crtc_state->c8_planes &= ~BIT(plane->id);
	crtc_state->data_rate[plane->id] = 0;
	crtc_state->min_cdclk[plane->id] = 0;

	plane_state->uapi.visible = false;
}

int intel_plane_atomic_check_with_state(const struct intel_crtc_state *old_crtc_state,
					struct intel_crtc_state *new_crtc_state,
					const struct intel_plane_state *old_plane_state,
					struct intel_plane_state *new_plane_state)
{
	struct intel_plane *plane = to_intel_plane(new_plane_state->uapi.plane);
	const struct drm_framebuffer *fb = new_plane_state->hw.fb;
	int ret;

	intel_plane_set_invisible(new_crtc_state, new_plane_state);
	new_crtc_state->enabled_planes &= ~BIT(plane->id);

	if (!new_plane_state->hw.crtc && !old_plane_state->hw.crtc)
		return 0;

	ret = plane->check_plane(new_crtc_state, new_plane_state);
	if (ret)
		return ret;

	if (fb)
		new_crtc_state->enabled_planes |= BIT(plane->id);

	/* FIXME pre-g4x don't work like this */
	if (new_plane_state->uapi.visible)
		new_crtc_state->active_planes |= BIT(plane->id);

	if (new_plane_state->uapi.visible &&
	    intel_format_info_is_yuv_semiplanar(fb->format, fb->modifier))
		new_crtc_state->nv12_planes |= BIT(plane->id);

	if (new_plane_state->uapi.visible &&
	    fb->format->format == DRM_FORMAT_C8)
		new_crtc_state->c8_planes |= BIT(plane->id);

	if (new_plane_state->uapi.visible || old_plane_state->uapi.visible)
		new_crtc_state->update_planes |= BIT(plane->id);

	new_crtc_state->data_rate[plane->id] =
		intel_plane_data_rate(new_crtc_state, new_plane_state);

	return intel_plane_atomic_calc_changes(old_crtc_state, new_crtc_state,
					       old_plane_state, new_plane_state);
}

static struct intel_plane *
intel_crtc_get_plane(struct intel_crtc *crtc, enum plane_id plane_id)
{
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	struct intel_plane *plane;

	for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) {
		if (plane->id == plane_id)
			return plane;
	}

	return NULL;
}

int intel_plane_atomic_check(struct intel_atomic_state *state,
			     struct intel_plane *plane)
{
	struct drm_i915_private *i915 = to_i915(state->base.dev);
	struct intel_plane_state *new_plane_state =
		intel_atomic_get_new_plane_state(state, plane);
	const struct intel_plane_state *old_plane_state =
		intel_atomic_get_old_plane_state(state, plane);
	const struct intel_plane_state *new_master_plane_state;
	struct intel_crtc *crtc = intel_crtc_for_pipe(i915, plane->pipe);
	const struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);

	if (new_crtc_state && new_crtc_state->bigjoiner_slave) {
		struct intel_plane *master_plane =
			intel_crtc_get_plane(new_crtc_state->bigjoiner_linked_crtc,
					     plane->id);

		new_master_plane_state =
			intel_atomic_get_new_plane_state(state, master_plane);
	} else {
		new_master_plane_state = new_plane_state;
	}

	intel_plane_copy_uapi_to_hw_state(new_plane_state,
					  new_master_plane_state,
					  crtc);

	new_plane_state->uapi.visible = false;
	if (!new_crtc_state)
		return 0;

	return intel_plane_atomic_check_with_state(old_crtc_state,
						   new_crtc_state,
						   old_plane_state,
						   new_plane_state);
}

static struct intel_plane *
skl_next_plane_to_commit(struct intel_atomic_state *state,
			 struct intel_crtc *crtc,
			 struct skl_ddb_entry entries_y[I915_MAX_PLANES],
			 struct skl_ddb_entry entries_uv[I915_MAX_PLANES],
			 unsigned int *update_mask)
{
	struct intel_crtc_state *crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct intel_plane_state *plane_state;
	struct intel_plane *plane;
	int i;

	if (*update_mask == 0)
		return NULL;

	for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
		enum plane_id plane_id = plane->id;

		if (crtc->pipe != plane->pipe ||
		    !(*update_mask & BIT(plane_id)))
			continue;

		if (skl_ddb_allocation_overlaps(&crtc_state->wm.skl.plane_ddb_y[plane_id],
						entries_y,
						I915_MAX_PLANES, plane_id) ||
		    skl_ddb_allocation_overlaps(&crtc_state->wm.skl.plane_ddb_uv[plane_id],
						entries_uv,
						I915_MAX_PLANES, plane_id))
			continue;

		*update_mask &= ~BIT(plane_id);
		entries_y[plane_id] = crtc_state->wm.skl.plane_ddb_y[plane_id];
		entries_uv[plane_id] = crtc_state->wm.skl.plane_ddb_uv[plane_id];

		return plane;
	}

	/* should never happen */
	drm_WARN_ON(state->base.dev, 1);

	return NULL;
}

void intel_plane_update_noarm(struct intel_plane *plane,
			      const struct intel_crtc_state *crtc_state,
			      const struct intel_plane_state *plane_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	trace_intel_plane_update_noarm(&plane->base, crtc);

	if (plane->update_noarm)
		plane->update_noarm(plane, crtc_state, plane_state);
}

void intel_plane_update_arm(struct intel_plane *plane,
			    const struct intel_crtc_state *crtc_state,
			    const struct intel_plane_state *plane_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	trace_intel_plane_update_arm(&plane->base, crtc);

	if (crtc_state->uapi.async_flip && plane->async_flip)
		plane->async_flip(plane, crtc_state, plane_state, true);
	else
		plane->update_arm(plane, crtc_state, plane_state);
}

void intel_plane_disable_arm(struct intel_plane *plane,
			     const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	trace_intel_plane_disable_arm(&plane->base, crtc);
	plane->disable_arm(plane, crtc_state);
}

void intel_update_planes_on_crtc(struct intel_atomic_state *state,
				 struct intel_crtc *crtc)
{
	struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	u32 update_mask = new_crtc_state->update_planes;
	struct intel_plane_state *new_plane_state;
	struct intel_plane *plane;
	int i;

	if (new_crtc_state->uapi.async_flip)
		return;

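	/*
	 * Since we only write non-arming registers here,
	 * the order does not matter even for skl+.
	 */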
	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
		if (crtc->pipe != plane->pipe ||
		    !(update_mask & BIT(plane->id)))
			continue;

		/* TODO: for mailbox updates this should be skipped */
		if (new_plane_state->uapi.visible ||
		    new_plane_state->planar_slave)
			intel_plane_update_noarm(plane, new_crtc_state, new_plane_state);
	}
}

void skl_arm_planes_on_crtc(struct intel_atomic_state *state,
			    struct intel_crtc *crtc)
{
	struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	struct skl_ddb_entry entries_y[I915_MAX_PLANES];
	struct skl_ddb_entry entries_uv[I915_MAX_PLANES];
	u32 update_mask = new_crtc_state->update_planes;
	struct intel_plane *plane;

	memcpy(entries_y, old_crtc_state->wm.skl.plane_ddb_y,
	       sizeof(old_crtc_state->wm.skl.plane_ddb_y));
	memcpy(entries_uv, old_crtc_state->wm.skl.plane_ddb_uv,
	       sizeof(old_crtc_state->wm.skl.plane_ddb_uv));

	while ((plane = skl_next_plane_to_commit(state, crtc,
						 entries_y, entries_uv,
						 &update_mask))) {
		struct intel_plane_state *new_plane_state =
			intel_atomic_get_new_plane_state(state, plane);

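		/*
		 * TODO: for mailbox updates intel_plane_update_noarm()
		 * would have to be called here as well.
		 */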
		if (new_plane_state->uapi.visible ||
		    new_plane_state->planar_slave)
			intel_plane_update_arm(plane, new_crtc_state, new_plane_state);
		else
			intel_plane_disable_arm(plane, new_crtc_state);
	}
}

void i9xx_arm_planes_on_crtc(struct intel_atomic_state *state,
			     struct intel_crtc *crtc)
{
	struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	u32 update_mask = new_crtc_state->update_planes;
	struct intel_plane_state *new_plane_state;
	struct intel_plane *plane;
	int i;

	for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
		if (crtc->pipe != plane->pipe ||
		    !(update_mask & BIT(plane->id)))
			continue;

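		/*
		 * TODO: for mailbox updates intel_plane_update_noarm()
		 * would have to be called here as well.
		 */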
		if (new_plane_state->uapi.visible)
			intel_plane_update_arm(plane, new_crtc_state, new_plane_state);
		else
			intel_plane_disable_arm(plane, new_crtc_state);
	}
}

int intel_atomic_plane_check_clipping(struct intel_plane_state *plane_state,
				      struct intel_crtc_state *crtc_state,
				      int min_scale, int max_scale,
				      bool can_position)
{
	struct drm_framebuffer *fb = plane_state->hw.fb;
	struct drm_rect *src = &plane_state->uapi.src;
	struct drm_rect *dst = &plane_state->uapi.dst;
	unsigned int rotation = plane_state->hw.rotation;
	struct drm_rect clip = {};
	int hscale, vscale;

	if (!fb) {
		plane_state->uapi.visible = false;
		return 0;
	}

	drm_rect_rotate(src, fb->width << 16, fb->height << 16, rotation);

	/* Check scaling */
	hscale = drm_rect_calc_hscale(src, dst, min_scale, max_scale);
	vscale = drm_rect_calc_vscale(src, dst, min_scale, max_scale);
	if (hscale < 0 || vscale < 0) {
		DRM_DEBUG_KMS("Invalid scaling of plane\n");
		drm_rect_debug_print("src: ", src, true);
		drm_rect_debug_print("dst: ", dst, false);
		return -ERANGE;
	}

	if (crtc_state->hw.enable) {
		clip.x2 = crtc_state->pipe_src_w;
		clip.y2 = crtc_state->pipe_src_h;
	}

	/* right side of the image is on the slave crtc, adjust dst to match */
	if (crtc_state->bigjoiner_slave)
		drm_rect_translate(dst, -crtc_state->pipe_src_w, 0);

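	/*
	 * FIXME: This might need further adjustment for seamless scaling
	 * with phase information, for the 2p2 and 2p1 cases.
	 */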
	plane_state->uapi.visible = drm_rect_clip_scaled(src, dst, &clip);

	drm_rect_rotate_inv(src, fb->width << 16, fb->height << 16, rotation);

	if (!can_position && plane_state->uapi.visible &&
	    !drm_rect_equals(dst, &clip)) {
		DRM_DEBUG_KMS("Plane must cover entire CRTC\n");
		drm_rect_debug_print("dst: ", dst, false);
		drm_rect_debug_print("clip: ", &clip, false);
		return -EINVAL;
	}

	return 0;
}

struct wait_rps_boost {
	struct wait_queue_entry wait;

	struct drm_crtc *crtc;
	struct i915_request *request;
};

static int do_rps_boost(struct wait_queue_entry *_wait,
			unsigned mode, int sync, void *key)
{
	struct wait_rps_boost *wait = container_of(_wait, typeof(*wait), wait);
	struct i915_request *rq = wait->request;

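	/*
	 * If we missed the vblank, but the request is already running it
	 * is reasonable to assume that it will complete before the next
	 * vblank without our intervention, so leave RPS alone.
	 */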
	if (!i915_request_started(rq))
		intel_rps_boost(rq);
	i915_request_put(rq);

	drm_crtc_vblank_put(wait->crtc);

	list_del(&wait->wait.entry);
	kfree(wait);
	return 1;
}

static void add_rps_boost_after_vblank(struct drm_crtc *crtc,
				       struct dma_fence *fence)
{
	struct wait_rps_boost *wait;

	if (!dma_fence_is_i915(fence))
		return;

	if (DISPLAY_VER(to_i915(crtc->dev)) < 6)
		return;

	if (drm_crtc_vblank_get(crtc))
		return;

	wait = kmalloc(sizeof(*wait), GFP_KERNEL);
	if (!wait) {
		drm_crtc_vblank_put(crtc);
		return;
	}

	wait->request = to_request(dma_fence_get(fence));
	wait->crtc = crtc;

	wait->wait.func = do_rps_boost;
	wait->wait.flags = 0;

	add_wait_queue(drm_crtc_vblank_waitqueue(crtc), &wait->wait);
}

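/**
 * intel_prepare_plane_fb - Prepare fb for usage on plane
 * @_plane: drm plane to prepare for
 * @_new_plane_state: the plane state being prepared
 *
 * Prepares a framebuffer for usage on a display plane. Generally this
 * involves pinning the underlying object and updating the frontbuffer
 * tracking bits.
 *
 * Returns 0 on success, negative error code on failure.
 */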
static int
intel_prepare_plane_fb(struct drm_plane *_plane,
		       struct drm_plane_state *_new_plane_state)
{
	struct i915_sched_attr attr = { .priority = I915_PRIORITY_DISPLAY };
	struct intel_plane *plane = to_intel_plane(_plane);
	struct intel_plane_state *new_plane_state =
		to_intel_plane_state(_new_plane_state);
	struct intel_atomic_state *state =
		to_intel_atomic_state(new_plane_state->uapi.state);
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	const struct intel_plane_state *old_plane_state =
		intel_atomic_get_old_plane_state(state, plane);
	struct drm_i915_gem_object *obj = intel_fb_obj(new_plane_state->hw.fb);
	struct drm_i915_gem_object *old_obj = intel_fb_obj(old_plane_state->hw.fb);
	int ret;

	if (old_obj) {
		const struct intel_crtc_state *crtc_state =
			intel_atomic_get_new_crtc_state(state,
							to_intel_crtc(old_plane_state->hw.crtc));

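		/* Big Hammer, we also need to ensure that any pending
		 * MI_WAIT_FOR_EVENT inside a user batch buffer on the
		 * current scanout is retired before unpinning the old
		 * framebuffer. Note that we rely on userspace rendering
		 * into the buffer attached to the pipe they are waiting
		 * on. If not, userspace generates a GPU hang with IPEHR
		 * point to the MI_WAIT_FOR_EVENT.
		 *
		 * This should only fail upon a hung GPU, in which case we
		 * can safely continue.
		 */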
		if (intel_crtc_needs_modeset(crtc_state)) {
			ret = i915_sw_fence_await_reservation(&state->commit_ready,
							      old_obj->base.resv, NULL,
							      false, 0,
							      GFP_KERNEL);
			if (ret < 0)
				return ret;
		}
	}

	if (new_plane_state->uapi.fence) { /* explicit fencing */
		i915_gem_fence_wait_priority(new_plane_state->uapi.fence,
					     &attr);
		ret = i915_sw_fence_await_dma_fence(&state->commit_ready,
						    new_plane_state->uapi.fence,
						    i915_fence_timeout(dev_priv),
						    GFP_KERNEL);
		if (ret < 0)
			return ret;
	}

	if (!obj)
		return 0;

	ret = intel_plane_pin_fb(new_plane_state);
	if (ret)
		return ret;

	i915_gem_object_wait_priority(obj, 0, &attr);

	if (!new_plane_state->uapi.fence) { /* implicit fencing */
		struct dma_resv_iter cursor;
		struct dma_fence *fence;

		ret = i915_sw_fence_await_reservation(&state->commit_ready,
						      obj->base.resv, NULL,
						      false,
						      i915_fence_timeout(dev_priv),
						      GFP_KERNEL);
		if (ret < 0)
			goto unpin_fb;

		dma_resv_iter_begin(&cursor, obj->base.resv, false);
		dma_resv_for_each_fence_unlocked(&cursor, fence) {
			add_rps_boost_after_vblank(new_plane_state->hw.crtc,
						   fence);
		}
		dma_resv_iter_end(&cursor);
	} else {
		add_rps_boost_after_vblank(new_plane_state->hw.crtc,
					   new_plane_state->uapi.fence);
	}

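	/*
	 * We declare pageflips to be interactive and so merit a small bias
	 * towards upclocking to deliver the frame on time. By only changing
	 * the RPS thresholds to sample more regularly and aim for higher
	 * clocks we can hopefully deliver low power workloads (like kodi)
	 * that are not quite steady state without resorting to forcing
	 * maximum clocks following a vblank miss (see do_rps_boost()).
	 */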
	if (!state->rps_interactive) {
		intel_rps_mark_interactive(&to_gt(dev_priv)->rps, true);
		state->rps_interactive = true;
	}

	return 0;

unpin_fb:
	intel_plane_unpin_fb(new_plane_state);

	return ret;
}

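/**
 * intel_cleanup_plane_fb - Cleans up an fb after plane use
 * @plane: drm plane to clean up for
 * @_old_plane_state: the state from the previous modeset
 *
 * Cleans up a framebuffer that has just been removed from a plane.
 */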
static void
intel_cleanup_plane_fb(struct drm_plane *plane,
		       struct drm_plane_state *_old_plane_state)
{
	struct intel_plane_state *old_plane_state =
		to_intel_plane_state(_old_plane_state);
	struct intel_atomic_state *state =
		to_intel_atomic_state(old_plane_state->uapi.state);
	struct drm_i915_private *dev_priv = to_i915(plane->dev);
	struct drm_i915_gem_object *obj = intel_fb_obj(old_plane_state->hw.fb);

	if (!obj)
		return;

	if (state->rps_interactive) {
		intel_rps_mark_interactive(&to_gt(dev_priv)->rps, false);
		state->rps_interactive = false;
	}

	/* Should only be called after a successful intel_prepare_plane_fb()! */
	intel_plane_unpin_fb(old_plane_state);
}

static const struct drm_plane_helper_funcs intel_plane_helper_funcs = {
	.prepare_fb = intel_prepare_plane_fb,
	.cleanup_fb = intel_cleanup_plane_fb,
};

void intel_plane_helper_add(struct intel_plane *plane)
{
	drm_plane_helper_add(&plane->base, &intel_plane_helper_funcs);
}