#include <drm/drm_vblank.h>

#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "atom.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#ifdef CONFIG_DRM_AMDGPU_SI
#include "dce_v6_0.h"
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
#include "dce_v8_0.h"
#endif
#include "dce_v10_0.h"
#include "dce_v11_0.h"
#include "dce_virtual.h"
#include "ivsrcid/ivsrcid_vislands30.h"

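/* Software vblank period in nanoseconds (~16.67 ms, i.e. a 60 Hz refresh). */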
#define DCE_VIRTUAL_VBLANK_PERIOD 16666666

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index);
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							 int crtc,
							 enum amdgpu_interrupt_state state);

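/*
 * No real display hardware sits behind the virtual DCE, so the display
 * callbacks below are stubs that report fixed values.
 */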
static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	return 0;
}

static void dce_virtual_page_flip(struct amdgpu_device *adev,
				  int crtc_id, u64 crtc_base, bool async)
{
	return;
}

static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					   u32 *vbl, u32 *position)
{
	*vbl = 0;
	*position = 0;

	return -EINVAL;
}

static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
				  enum amdgpu_hpd_id hpd)
{
	return true;
}

static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
					 enum amdgpu_hpd_id hpd)
{
	return;
}

static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return 0;
}
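/* Nothing to program without real display hardware; watermark updates are a no-op. */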
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
	return;
}

static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size,
				      struct drm_modeset_acquire_ctx *ctx)
{
	return 0;
}

static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}

static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_display_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_display_crtc_page_flip_target,
};

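/*
 * DPMS only has to manage the software vblank state: turn the vblank
 * machinery on or off along with the CRTC.  Under SR-IOV the virtual
 * function does not handle DPMS at all.
 */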
static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	if (amdgpu_sriov_vf(adev))
		return;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;

		type = amdgpu_display_crtc_idx_to_irq_type(adev,
						amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_crtc_vblank_off(crtc);
		amdgpu_crtc->enabled = false;
		break;
	}
}

static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}

static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
}

static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode,
				     int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}

static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
{
	return 0;
}

static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					    struct drm_framebuffer *fb,
					    int x, int y, enum mode_set_atomic state)
{
	return 0;
}

static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.disable = dce_virtual_crtc_disable,
};

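/* Allocate one virtual CRTC and register it with DRM; its vblank timer is armed on demand. */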
static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
	struct amdgpu_crtc *amdgpu_crtc;

	amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
			      (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (amdgpu_crtc == NULL)
		return -ENOMEM;

	drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
	amdgpu_crtc->crtc_id = index;
	adev->mode_info.crtcs[index] = amdgpu_crtc;

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
	amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
	drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

	return 0;
}

static int dce_virtual_early_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_virtual_set_display_funcs(adev);
	dce_virtual_set_irq_funcs(adev);

	adev->mode_info.num_hpd = 1;
	adev->mode_info.num_dig = 1;
	return 0;
}

static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
	struct drm_encoder *encoder;
	int i;

	drm_connector_for_each_possible_encoder(connector, encoder, i) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
			return encoder;
	}

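	/* no virtual encoder found, fall back to the first possible encoder */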
	drm_connector_for_each_possible_encoder(connector, encoder, i)
		return encoder;

	return NULL;
}

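/*
 * There is no EDID to query, so report a fixed table of common resolutions,
 * each generated as a 60 Hz CVT mode.
 */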
static int dce_virtual_get_modes(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;
	struct drm_display_mode *mode = NULL;
	unsigned i;
	static const struct mode_size {
		int w;
		int h;
	} common_modes[] = {
		{ 640, 480},
		{ 720, 480},
		{ 800, 600},
		{ 848, 480},
		{1024, 768},
		{1152, 768},
		{1280, 720},
		{1280, 800},
		{1280, 854},
		{1280, 960},
		{1280, 1024},
		{1440, 900},
		{1400, 1050},
		{1680, 1050},
		{1600, 1200},
		{1920, 1080},
		{1920, 1200}
	};

	for (i = 0; i < ARRAY_SIZE(common_modes); i++) {
		mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h,
				    60, false, false, false);
		drm_mode_probed_add(connector, mode);
	}

	return 0;
}

static enum drm_mode_status dce_virtual_mode_valid(struct drm_connector *connector,
						   struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
	return 0;
}

static int
dce_virtual_set_property(struct drm_connector *connector,
			 struct drm_property *property,
			 uint64_t val)
{
	return 0;
}

static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}

static void dce_virtual_force(struct drm_connector *connector)
{
	return;
}

static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};

static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};

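/*
 * Software init: register the CRTC vblank interrupt source (reusing the SMU
 * display timer source ID), set up the KMS mode_config limits and props, and
 * create one virtual CRTC/encoder/connector per CRTC.
 */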
static int dce_virtual_sw_init(void *handle)
{
	int r, i;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY,
			      VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER,
			      &adev->crtc_irq);
	if (r)
		return r;

	adev->ddev->max_vblank_count = 0;

	adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	adev->ddev->mode_config.preferred_depth = 24;
	adev->ddev->mode_config.prefer_shadow = 1;

	adev->ddev->mode_config.fb_base = adev->gmc.aper_base;

	r = amdgpu_display_modeset_create_props(adev);
	if (r)
		return r;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

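	/* allocate one virtual crtc and encoder/connector pair per crtc */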
	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		r = dce_virtual_crtc_init(adev, i);
		if (r)
			return r;
		r = dce_virtual_connector_encoder_init(adev, i);
		if (r)
			return r;
	}

	drm_kms_helper_poll_init(adev->ddev);

	adev->mode_info.mode_config_initialized = true;
	return 0;
}

static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev->ddev);

	drm_mode_config_cleanup(adev->ddev);

	memset(adev->mode_info.crtcs, 0,
	       sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}

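/*
 * "Hardware" init for the virtual display: turn off the real DCE block on
 * ASICs that have one, so scanout is handled entirely in software.
 */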
static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
	case CHIP_VEGAM:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
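		/* no physical DCE block on these ASICs, nothing to disable */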
		break;
	default:
		break;
	}
	return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int i = 0;

	for (i = 0; i < adev->mode_info.num_crtc; i++)
		if (adev->mode_info.crtcs[i])
			dce_virtual_set_crtc_vblank_interrupt_state(adev, i,
						AMDGPU_IRQ_STATE_DISABLE);

	return 0;
}

static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
	return dce_virtual_hw_init(handle);
}

static bool dce_virtual_is_idle(void *handle)
{
	return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
					     enum amd_clockgating_state state)
{
	return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
					     enum amd_powergating_state state)
{
	return 0;
}

static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};

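/* The virtual encoder has no hardware state, so its helper callbacks are no-ops. */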
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
	return;
}

static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
	return;
}

static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	return;
}

static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
					   const struct drm_display_mode *mode,
					   struct drm_display_mode *adjusted_mode)
{
	return true;
}

static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};

static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};

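/* Create one virtual encoder/connector pair for the given CRTC index and link them. */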
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index)
{
	struct drm_encoder *encoder;
	struct drm_connector *connector;

	encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
	if (!encoder)
		return -ENOMEM;
	encoder->possible_crtcs = 1 << index;
	drm_encoder_init(adev->ddev, encoder, &dce_virtual_encoder_funcs,
			 DRM_MODE_ENCODER_VIRTUAL, NULL);
	drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);

	connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
	if (!connector) {
		kfree(encoder);
		return -ENOMEM;
	}

	drm_connector_init(adev->ddev, connector, &dce_virtual_connector_funcs,
			   DRM_MODE_CONNECTOR_VIRTUAL);
	drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;
	drm_connector_register(connector);

	drm_connector_attach_encoder(connector, encoder);

	return 0;
}

static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
};

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	adev->mode_info.funcs = &dce_virtual_display_funcs;
}

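/*
 * Complete a page flip that was submitted on @crtc_id: deliver the pending
 * vblank event and release the flip work.  Called from the vblank timer.
 */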
static int dce_virtual_pageflip(struct amdgpu_device *adev,
				unsigned crtc_id)
{
	unsigned long flags;
	struct amdgpu_crtc *amdgpu_crtc;
	struct amdgpu_flip_work *works;

	if (crtc_id >= adev->mode_info.num_crtc) {
		DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
		return -EINVAL;
	}

	amdgpu_crtc = adev->mode_info.crtcs[crtc_id];
	if (amdgpu_crtc == NULL)
		return 0;

	spin_lock_irqsave(&adev->ddev->event_lock, flags);
	works = amdgpu_crtc->pflip_works;
	if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
				 "AMDGPU_FLIP_SUBMITTED(%d)\n",
				 amdgpu_crtc->pflip_status,
				 AMDGPU_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
		return 0;
	}

	/* page flip completed, clean up */
	amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
	amdgpu_crtc->pflip_works = NULL;

	/* wake up userspace */
	if (works->event)
		drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);

	spin_unlock_irqrestore(&adev->ddev->event_lock, flags);

	drm_crtc_vblank_put(&amdgpu_crtc->base);
	amdgpu_bo_unref(&works->old_abo);
	kfree(works->shared);
	kfree(works);

	return 0;
}

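/*
 * Software vblank source: the per-CRTC hrtimer fires every
 * DCE_VIRTUAL_VBLANK_PERIOD, signals a vblank, completes any pending page
 * flip, and then re-arms itself.
 */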
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = ddev->dev_private;

	drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
	dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

	return HRTIMER_NORESTART;
}

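/*
 * Emulate enabling/disabling the vblank interrupt by starting or cancelling
 * the per-CRTC hrtimer.
 */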
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							 int crtc,
							 enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	if (state && !adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Enable software vsync timer\n");
		hrtimer_init(&adev->mode_info.crtcs[crtc]->vblank_timer,
			     CLOCK_MONOTONIC, HRTIMER_MODE_REL);
		hrtimer_set_expires(&adev->mode_info.crtcs[crtc]->vblank_timer,
				    DCE_VIRTUAL_VBLANK_PERIOD);
		adev->mode_info.crtcs[crtc]->vblank_timer.function =
			dce_virtual_vblank_timer_handle;
		hrtimer_start(&adev->mode_info.crtcs[crtc]->vblank_timer,
			      DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
	} else if (!state && adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Disable software vsync timer\n");
		hrtimer_cancel(&adev->mode_info.crtcs[crtc]->vblank_timer);
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}

static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}

static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};

static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VBLANK6 + 1;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}

const struct amdgpu_ip_block_version dce_virtual_ip_block = {
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};