#include <drm/drm_vblank.h>

#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "atom.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#ifdef CONFIG_DRM_AMDGPU_SI
#include "dce_v6_0.h"
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
#include "dce_v8_0.h"
#endif
#include "dce_v10_0.h"
#include "dce_v11_0.h"
#include "dce_virtual.h"
#include "ivsrcid/ivsrcid_vislands30.h"

#define DCE_VIRTUAL_VBLANK_PERIOD 16666666

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index);
static int dce_virtual_pageflip(struct amdgpu_device *adev,
				unsigned crtc_id);
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer);
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state);

static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	return 0;
}

static void dce_virtual_page_flip(struct amdgpu_device *adev,
				  int crtc_id, u64 crtc_base, bool async)
{
}

static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					   u32 *vbl, u32 *position)
{
	*vbl = 0;
	*position = 0;

	return -EINVAL;
}

static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
				  enum amdgpu_hpd_id hpd)
{
	return true;
}

static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
					 enum amdgpu_hpd_id hpd)
{
}

static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return 0;
}

/*
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * No-op for the virtual display engine; there is no hardware to program.
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
}

static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size,
				      struct drm_modeset_acquire_ctx *ctx)
{
	return 0;
}

static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}

static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_display_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_display_crtc_page_flip_target,
	.get_vblank_counter = amdgpu_get_vblank_counter_kms,
	.enable_vblank = amdgpu_enable_vblank_kms,
	.disable_vblank = amdgpu_disable_vblank_kms,
	.get_vblank_timestamp = drm_crtc_vblank_helper_get_vblank_timestamp,
};

static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	if (amdgpu_sriov_vf(adev))
		return;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;
		/* make sure vblank interrupts are re-evaluated for this crtc */
		type = amdgpu_display_crtc_idx_to_irq_type(adev,
						amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_crtc_vblank_off(crtc);
		amdgpu_crtc->enabled = false;
		break;
	}
}

static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}

static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	if (dev->num_crtcs)
		drm_crtc_vblank_off(crtc);

	amdgpu_crtc->enabled = false;
	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
}

static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode,
				     int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}

static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
{
	return 0;
}

static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					    struct drm_framebuffer *fb,
					    int x, int y, enum mode_set_atomic state)
{
	return 0;
}

static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.disable = dce_virtual_crtc_disable,
	.get_scanout_position = amdgpu_crtc_get_scanout_position,
};

static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
	struct amdgpu_crtc *amdgpu_crtc;

	amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
			      (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (amdgpu_crtc == NULL)
		return -ENOMEM;

	drm_crtc_init(adev_to_drm(adev), &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
	amdgpu_crtc->crtc_id = index;
	adev->mode_info.crtcs[index] = amdgpu_crtc;

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
	amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
	drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

	/* emulate vblank with an hrtimer firing roughly every 16.67 ms (~60 Hz) */
	hrtimer_init(&amdgpu_crtc->vblank_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
	hrtimer_set_expires(&amdgpu_crtc->vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD);
	amdgpu_crtc->vblank_timer.function = dce_virtual_vblank_timer_handle;
	hrtimer_start(&amdgpu_crtc->vblank_timer,
		      DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
	return 0;
}

static int dce_virtual_early_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_virtual_set_display_funcs(adev);
	dce_virtual_set_irq_funcs(adev);

	adev->mode_info.num_hpd = 1;
	adev->mode_info.num_dig = 1;
	return 0;
}

static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
	struct drm_encoder *encoder;

	drm_connector_for_each_possible_encoder(connector, encoder) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
			return encoder;
	}

	/* no virtual encoder found, pick the first one */
	drm_connector_for_each_possible_encoder(connector, encoder)
		return encoder;

	return NULL;
}

static int dce_virtual_get_modes(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;
	struct drm_display_mode *mode = NULL;
	unsigned i;
	static const struct mode_size {
		int w;
		int h;
	} common_modes[] = {
		{ 640, 480},
		{ 720, 480},
		{ 800, 600},
		{ 848, 480},
		{1024, 768},
		{1152, 768},
		{1280, 720},
		{1280, 800},
		{1280, 854},
		{1280, 960},
		{1280, 1024},
		{1440, 900},
		{1400, 1050},
		{1680, 1050},
		{1600, 1200},
		{1920, 1080},
		{1920, 1200},
		{4096, 3112},
		{3656, 2664},
		{3840, 2160},
		{4096, 2160},
	};
	int count = 0;

	for (i = 0; i < ARRAY_SIZE(common_modes); i++) {
		mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h,
				    60, false, false, false);
		if (!mode)
			continue;
		drm_mode_probed_add(connector, mode);
		count++;
	}

	/* .get_modes is expected to return the number of modes added */
	return count;
}

static enum drm_mode_status dce_virtual_mode_valid(struct drm_connector *connector,
						   struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
	return 0;
}

static int
dce_virtual_set_property(struct drm_connector *connector,
			 struct drm_property *property,
			 uint64_t val)
{
	return 0;
}

static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}

static void dce_virtual_force(struct drm_connector *connector)
{
}

static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};

static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};

static int dce_virtual_sw_init(void *handle)
{
	int r, i;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY,
			      VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER,
			      &adev->crtc_irq);
	if (r)
		return r;

	adev_to_drm(adev)->max_vblank_count = 0;

	adev_to_drm(adev)->mode_config.funcs = &amdgpu_mode_funcs;

	adev_to_drm(adev)->mode_config.max_width = 16384;
	adev_to_drm(adev)->mode_config.max_height = 16384;

	adev_to_drm(adev)->mode_config.preferred_depth = 24;
	adev_to_drm(adev)->mode_config.prefer_shadow = 1;

	adev_to_drm(adev)->mode_config.fb_base = adev->gmc.aper_base;

	r = amdgpu_display_modeset_create_props(adev);
	if (r)
		return r;

	/* allocate crtcs, encoders, connectors */
	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		r = dce_virtual_crtc_init(adev, i);
		if (r)
			return r;
		r = dce_virtual_connector_encoder_init(adev, i);
		if (r)
			return r;
	}

	drm_kms_helper_poll_init(adev_to_drm(adev));

	adev->mode_info.mode_config_initialized = true;
	return 0;
}

static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev_to_drm(adev));

	drm_mode_config_cleanup(adev_to_drm(adev));

	memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}

static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
	case CHIP_VEGAM:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
		/* no DCE block on these ASICs, nothing to disable */
		break;
	default:
		break;
	}
	return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int i = 0;

	for (i = 0; i < adev->mode_info.num_crtc; i++)
		if (adev->mode_info.crtcs[i])
			hrtimer_cancel(&adev->mode_info.crtcs[i]->vblank_timer);

	return 0;
}

static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
	return dce_virtual_hw_init(handle);
}

static bool dce_virtual_is_idle(void *handle)
{
	return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
					     enum amd_clockgating_state state)
{
	return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
					     enum amd_powergating_state state)
{
	return 0;
}

static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};

static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
}

static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
}

static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
}

static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
}

static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
}

static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
					   const struct drm_display_mode *mode,
					   struct drm_display_mode *adjusted_mode)
{
	return true;
}

static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};

static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};

static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index)
{
	struct drm_encoder *encoder;
	struct drm_connector *connector;

	/* add a new encoder */
	encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
	if (!encoder)
		return -ENOMEM;
	encoder->possible_crtcs = 1 << index;
	drm_encoder_init(adev_to_drm(adev), encoder, &dce_virtual_encoder_funcs,
			 DRM_MODE_ENCODER_VIRTUAL, NULL);
	drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);

	connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
	if (!connector) {
		kfree(encoder);
		return -ENOMEM;
	}

	/* add a new connector */
	drm_connector_init(adev_to_drm(adev), connector, &dce_virtual_connector_funcs,
			   DRM_MODE_CONNECTOR_VIRTUAL);
	drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;

	/* link the encoder and the connector */
	drm_connector_attach_encoder(connector, encoder);

	return 0;
}

static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
};

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	adev->mode_info.funcs = &dce_virtual_display_funcs;
}

static int dce_virtual_pageflip(struct amdgpu_device *adev,
				unsigned crtc_id)
{
	unsigned long flags;
	struct amdgpu_crtc *amdgpu_crtc;
	struct amdgpu_flip_work *works;

	/* validate the crtc index before using it to index the crtc array */
	if (crtc_id >= adev->mode_info.num_crtc) {
		DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
		return -EINVAL;
	}

	amdgpu_crtc = adev->mode_info.crtcs[crtc_id];

	/* the vblank timer can fire before the crtc has been initialized */
	if (amdgpu_crtc == NULL)
		return 0;

	spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags);
	works = amdgpu_crtc->pflip_works;
	if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
				 "AMDGPU_FLIP_SUBMITTED(%d)\n",
				 amdgpu_crtc->pflip_status,
				 AMDGPU_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
		return 0;
	}

	/* page flip completed, clean up */
	amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
	amdgpu_crtc->pflip_works = NULL;

	/* wake up userspace */
	if (works->event)
		drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);

	spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);

	drm_crtc_vblank_put(&amdgpu_crtc->base);
	amdgpu_bo_unref(&works->old_abo);
	kfree(works->shared);
	kfree(works);

	return 0;
}

static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = drm_to_adev(ddev);
	struct amdgpu_irq_src *source = adev->irq.client[AMDGPU_IRQ_CLIENTID_LEGACY].sources
		[VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER];
	int irq_type = amdgpu_display_crtc_idx_to_irq_type(adev,
						amdgpu_crtc->crtc_id);

	if (amdgpu_irq_enabled(adev, source, irq_type)) {
		drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
		dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	}

	/* re-arm the timer for the next simulated vblank period */
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

	return HRTIMER_NORESTART;
}

static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}

static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}

static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};

static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VBLANK6 + 1;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}

const struct amdgpu_ip_block_version dce_virtual_ip_block = {
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};