1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28#include <drm/drmP.h>
29#include "radeon.h"
30#include <drm/radeon_drm.h>
31#include "radeon_asic.h"
32
33#include <linux/vga_switcheroo.h>
34#include <linux/slab.h>
35
36
37
38
39
40
41
42
43
44
45
46
47int radeon_driver_unload_kms(struct drm_device *dev)
48{
49 struct radeon_device *rdev = dev->dev_private;
50
51 if (rdev == NULL)
52 return 0;
53 if (rdev->rmmio == NULL)
54 goto done_free;
55 radeon_acpi_fini(rdev);
56 radeon_modeset_fini(rdev);
57 radeon_device_fini(rdev);
58
59done_free:
60 kfree(rdev);
61 dev->dev_private = NULL;
62 return 0;
63}
64
65
66
67
68
69
70
71
72
73
74
75
76
77
/**
 * radeon_driver_load_kms - Main load function for KMS.
 *
 * @dev: drm dev pointer
 * @flags: device flags
 *
 * This is the main load function for KMS (all asics).
 * It calls radeon_device_init() to set up the non-display
 * parts of the chip (asic init, CP, writeback, etc.), and
 * radeon_modeset_init() to set up the display parts
 * (crtcs, encoders, hotplug detect, etc.).
 * Returns 0 on success, error on failure.
 */
int radeon_driver_load_kms(struct drm_device *dev, unsigned long flags)
{
	struct radeon_device *rdev;
	int r, acpi_status;

	rdev = kzalloc(sizeof(struct radeon_device), GFP_KERNEL);
	if (rdev == NULL) {
		return -ENOMEM;
	}
	/* set dev_private before init so the error path can unload cleanly */
	dev->dev_private = (void *)rdev;

	/* update BUS flag */
	if (drm_pci_device_is_agp(dev)) {
		flags |= RADEON_IS_AGP;
	} else if (pci_is_pcie(dev->pdev)) {
		flags |= RADEON_IS_PCIE;
	} else {
		flags |= RADEON_IS_PCI;
	}

	/* radeon_device_init should report only fatal error
	 * like memory allocation failure or iomapping failure,
	 * or memory manager initialization failure, it must
	 * properly initialize the GPU MC controller and permit
	 * VRAM allocation
	 */
	r = radeon_device_init(rdev, dev, dev->pdev, flags);
	if (r) {
		dev_err(&dev->pdev->dev, "Fatal error during GPU init\n");
		goto out;
	}

	/* Again modeset_init should fail only on fatal error
	 * otherwise it should provide enough functionalities
	 * for shadowfb to run
	 */
	r = radeon_modeset_init(rdev);
	if (r)
		dev_err(&dev->pdev->dev, "Fatal error during modeset init\n");

	/* Call ACPI methods: require modeset init
	 * but failure is not fatal
	 */
	if (!r) {
		acpi_status = radeon_acpi_init(rdev);
		if (acpi_status)
			dev_dbg(&dev->pdev->dev,
				"Error during ACPI methods call\n");
	}

out:
	if (r)
		radeon_driver_unload_kms(dev);
	return r;
}
133
134
135
136
137
138
139
140
141
142
143
144static void radeon_set_filp_rights(struct drm_device *dev,
145 struct drm_file **owner,
146 struct drm_file *applier,
147 uint32_t *value)
148{
149 mutex_lock(&dev->struct_mutex);
150 if (*value == 1) {
151
152 if (!*owner)
153 *owner = applier;
154 } else if (*value == 0) {
155
156 if (*owner == applier)
157 *owner = NULL;
158 }
159 *value = *owner == applier ? 1 : 0;
160 mutex_unlock(&dev->struct_mutex);
161}
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
/*
 * radeon_info_ioctl - answer a device specific request.
 *
 * @dev: drm device pointer
 * @data: request object (struct drm_radeon_info)
 * @filp: drm file pointer
 *
 * This function is used to pass device specific parameters to the caller.
 * Examples include used crtc id, accel working, tiling config, etc.
 * (all asics).
 * Returns 0 on success, -EINVAL/-EFAULT on failure.
 */
int radeon_info_ioctl(struct drm_device *dev, void *data, struct drm_file *filp)
{
	struct radeon_device *rdev = dev->dev_private;
	struct drm_radeon_info *info = data;
	struct radeon_mode_info *minfo = &rdev->mode_info;
	uint32_t *value, value_tmp, *value_ptr, value_size;
	uint64_t value64;
	struct drm_crtc *crtc;
	int i, found;

	/* By default the reply is a single 32-bit word held in value_tmp;
	 * individual requests below may repoint 'value' and 'value_size'
	 * (e.g. TIMESTAMP replies with 64 bits, SI_TILE_MODE_ARRAY with
	 * 32 dwords).  The final copy_to_user at the bottom uses both. */
	value_ptr = (uint32_t *)((unsigned long)info->value);
	value = &value_tmp;
	value_size = sizeof(uint32_t);

	switch (info->request) {
	case RADEON_INFO_DEVICE_ID:
		*value = dev->pci_device;
		break;
	case RADEON_INFO_NUM_GB_PIPES:
		*value = rdev->num_gb_pipes;
		break;
	case RADEON_INFO_NUM_Z_PIPES:
		*value = rdev->num_z_pipes;
		break;
	case RADEON_INFO_ACCEL_WORKING:
		/* evergreen parts always report false here; userspace of that
		 * era used ACCEL_WORKING2 instead for those chips */
		if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK))
			*value = false;
		else
			*value = rdev->accel_working;
		break;
	case RADEON_INFO_CRTC_FROM_ID:
		/* translate a drm crtc object id into the hw crtc index */
		if (DRM_COPY_FROM_USER(value, value_ptr, sizeof(uint32_t))) {
			DRM_ERROR("copy_from_user %s:%u\n", __func__, __LINE__);
			return -EFAULT;
		}
		for (i = 0, found = 0; i < rdev->num_crtc; i++) {
			crtc = (struct drm_crtc *)minfo->crtcs[i];
			if (crtc && crtc->base.id == *value) {
				struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
				*value = radeon_crtc->crtc_id;
				found = 1;
				break;
			}
		}
		if (!found) {
			DRM_DEBUG_KMS("unknown crtc id %d\n", *value);
			return -EINVAL;
		}
		break;
	case RADEON_INFO_ACCEL_WORKING2:
		*value = rdev->accel_working;
		break;
	case RADEON_INFO_TILING_CONFIG:
		if (rdev->family >= CHIP_TAHITI)
			*value = rdev->config.si.tile_config;
		else if (rdev->family >= CHIP_CAYMAN)
			*value = rdev->config.cayman.tile_config;
		else if (rdev->family >= CHIP_CEDAR)
			*value = rdev->config.evergreen.tile_config;
		else if (rdev->family >= CHIP_RV770)
			*value = rdev->config.rv770.tile_config;
		else if (rdev->family >= CHIP_R600)
			*value = rdev->config.r600.tile_config;
		else {
			DRM_DEBUG_KMS("tiling config is r6xx+ only!\n");
			return -EINVAL;
		}
		break;
	case RADEON_INFO_WANT_HYPERZ:
		/* The "value" here is both an input and output parameter.
		 * If the input value is 1, filp requests hyper-z access.
		 * If the input value is 0, filp revokes its hyper-z access.
		 *
		 * When returning, the value is 1 if filp owns hyper-z access,
		 * 0 otherwise. */
		if (DRM_COPY_FROM_USER(value, value_ptr, sizeof(uint32_t))) {
			DRM_ERROR("copy_from_user %s:%u\n", __func__, __LINE__);
			return -EFAULT;
		}
		if (*value >= 2) {
			DRM_DEBUG_KMS("WANT_HYPERZ: invalid value %d\n", *value);
			return -EINVAL;
		}
		radeon_set_filp_rights(dev, &rdev->hyperz_filp, filp, value);
		break;
	case RADEON_INFO_WANT_CMASK:
		/* The same logic as Hyper-Z. */
		if (DRM_COPY_FROM_USER(value, value_ptr, sizeof(uint32_t))) {
			DRM_ERROR("copy_from_user %s:%u\n", __func__, __LINE__);
			return -EFAULT;
		}
		if (*value >= 2) {
			DRM_DEBUG_KMS("WANT_CMASK: invalid value %d\n", *value);
			return -EINVAL;
		}
		radeon_set_filp_rights(dev, &rdev->cmask_filp, filp, value);
		break;
	case RADEON_INFO_CLOCK_CRYSTAL_FREQ:
		/* return clock value in KHz */
		if (rdev->asic->get_xclk)
			*value = radeon_get_xclk(rdev) * 10;
		else
			*value = rdev->clock.spll.reference_freq * 10;
		break;
	case RADEON_INFO_NUM_BACKENDS:
		if (rdev->family >= CHIP_TAHITI)
			*value = rdev->config.si.max_backends_per_se *
				rdev->config.si.max_shader_engines;
		else if (rdev->family >= CHIP_CAYMAN)
			*value = rdev->config.cayman.max_backends_per_se *
				rdev->config.cayman.max_shader_engines;
		else if (rdev->family >= CHIP_CEDAR)
			*value = rdev->config.evergreen.max_backends;
		else if (rdev->family >= CHIP_RV770)
			*value = rdev->config.rv770.max_backends;
		else if (rdev->family >= CHIP_R600)
			*value = rdev->config.r600.max_backends;
		else {
			return -EINVAL;
		}
		break;
	case RADEON_INFO_NUM_TILE_PIPES:
		if (rdev->family >= CHIP_TAHITI)
			*value = rdev->config.si.max_tile_pipes;
		else if (rdev->family >= CHIP_CAYMAN)
			*value = rdev->config.cayman.max_tile_pipes;
		else if (rdev->family >= CHIP_CEDAR)
			*value = rdev->config.evergreen.max_tile_pipes;
		else if (rdev->family >= CHIP_RV770)
			*value = rdev->config.rv770.max_tile_pipes;
		else if (rdev->family >= CHIP_R600)
			*value = rdev->config.r600.max_tile_pipes;
		else {
			return -EINVAL;
		}
		break;
	case RADEON_INFO_FUSION_GART_WORKING:
		/* always reported as working on this kernel */
		*value = 1;
		break;
	case RADEON_INFO_BACKEND_MAP:
		if (rdev->family >= CHIP_TAHITI)
			*value = rdev->config.si.backend_map;
		else if (rdev->family >= CHIP_CAYMAN)
			*value = rdev->config.cayman.backend_map;
		else if (rdev->family >= CHIP_CEDAR)
			*value = rdev->config.evergreen.backend_map;
		else if (rdev->family >= CHIP_RV770)
			*value = rdev->config.rv770.backend_map;
		else if (rdev->family >= CHIP_R600)
			*value = rdev->config.r600.backend_map;
		else {
			return -EINVAL;
		}
		break;
	case RADEON_INFO_VA_START:
		/* this is where we report if vm is supported or not */
		if (rdev->family < CHIP_CAYMAN)
			return -EINVAL;
		*value = RADEON_VA_RESERVED_SIZE;
		break;
	case RADEON_INFO_IB_VM_MAX_SIZE:
		/* this is where we report if vm is supported or not */
		if (rdev->family < CHIP_CAYMAN)
			return -EINVAL;
		*value = RADEON_IB_VM_MAX_SIZE;
		break;
	case RADEON_INFO_MAX_PIPES:
		if (rdev->family >= CHIP_TAHITI)
			*value = rdev->config.si.max_cu_per_sh;
		else if (rdev->family >= CHIP_CAYMAN)
			*value = rdev->config.cayman.max_pipes_per_simd;
		else if (rdev->family >= CHIP_CEDAR)
			*value = rdev->config.evergreen.max_pipes;
		else if (rdev->family >= CHIP_RV770)
			*value = rdev->config.rv770.max_pipes;
		else if (rdev->family >= CHIP_R600)
			*value = rdev->config.r600.max_pipes;
		else {
			return -EINVAL;
		}
		break;
	case RADEON_INFO_TIMESTAMP:
		if (rdev->family < CHIP_R600) {
			DRM_DEBUG_KMS("timestamp is r6xx+ only!\n");
			return -EINVAL;
		}
		/* repoint the reply at a 64-bit GPU clock counter */
		value = (uint32_t*)&value64;
		value_size = sizeof(uint64_t);
		value64 = radeon_get_gpu_clock_counter(rdev);
		break;
	case RADEON_INFO_MAX_SE:
		if (rdev->family >= CHIP_TAHITI)
			*value = rdev->config.si.max_shader_engines;
		else if (rdev->family >= CHIP_CAYMAN)
			*value = rdev->config.cayman.max_shader_engines;
		else if (rdev->family >= CHIP_CEDAR)
			*value = rdev->config.evergreen.num_ses;
		else
			*value = 1;
		break;
	case RADEON_INFO_MAX_SH_PER_SE:
		if (rdev->family >= CHIP_TAHITI)
			*value = rdev->config.si.max_sh_per_se;
		else
			return -EINVAL;
		break;
	case RADEON_INFO_FASTFB_WORKING:
		*value = rdev->fastfb_working;
		break;
	case RADEON_INFO_RING_WORKING:
		/* input selects a ring type; reply is its ready state */
		if (DRM_COPY_FROM_USER(value, value_ptr, sizeof(uint32_t))) {
			DRM_ERROR("copy_from_user %s:%u\n", __func__, __LINE__);
			return -EFAULT;
		}
		switch (*value) {
		case RADEON_CS_RING_GFX:
		case RADEON_CS_RING_COMPUTE:
			*value = rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready;
			break;
		case RADEON_CS_RING_DMA:
			/* ready if either DMA ring is up */
			*value = rdev->ring[R600_RING_TYPE_DMA_INDEX].ready;
			*value |= rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready;
			break;
		case RADEON_CS_RING_UVD:
			*value = rdev->ring[R600_RING_TYPE_UVD_INDEX].ready;
			break;
		default:
			return -EINVAL;
		}
		break;
	case RADEON_INFO_SI_TILE_MODE_ARRAY:
		if (rdev->family < CHIP_TAHITI) {
			DRM_DEBUG_KMS("tile mode array is si only!\n");
			return -EINVAL;
		}
		/* reply points directly at the 32-dword tile mode array */
		value = rdev->config.si.tile_mode_array;
		value_size = sizeof(uint32_t)*32;
		break;
	default:
		DRM_DEBUG_KMS("Invalid request %d\n", info->request);
		return -EINVAL;
	}
	if (DRM_COPY_TO_USER(value_ptr, (char*)value, value_size)) {
		DRM_ERROR("copy_to_user %s:%u\n", __func__, __LINE__);
		return -EFAULT;
	}
	return 0;
}
427
428
429
430
431
432
433
434
435
436
437
438
439
/**
 * radeon_driver_firstopen_kms - drm callback for first open
 *
 * @dev: drm dev pointer
 *
 * Nothing to be done for KMS (all asics).
 * Returns 0 on success.
 */
int radeon_driver_firstopen_kms(struct drm_device *dev)
{
	return 0;
}
444
445
446
447
448
449
450
451
/**
 * radeon_driver_lastclose_kms - drm callback for last close
 *
 * @dev: drm dev pointer
 *
 * Switch vga_switcheroo state after last close (all asics).
 */
void radeon_driver_lastclose_kms(struct drm_device *dev)
{
	vga_switcheroo_process_delayed_switch();
}
456
457
458
459
460
461
462
463
464
465
466int radeon_driver_open_kms(struct drm_device *dev, struct drm_file *file_priv)
467{
468 struct radeon_device *rdev = dev->dev_private;
469
470 file_priv->driver_priv = NULL;
471
472
473 if (rdev->family >= CHIP_CAYMAN) {
474 struct radeon_fpriv *fpriv;
475 struct radeon_bo_va *bo_va;
476 int r;
477
478 fpriv = kzalloc(sizeof(*fpriv), GFP_KERNEL);
479 if (unlikely(!fpriv)) {
480 return -ENOMEM;
481 }
482
483 radeon_vm_init(rdev, &fpriv->vm);
484
485
486
487 bo_va = radeon_vm_bo_add(rdev, &fpriv->vm,
488 rdev->ring_tmp_bo.bo);
489 r = radeon_vm_bo_set_addr(rdev, bo_va, RADEON_VA_IB_OFFSET,
490 RADEON_VM_PAGE_READABLE |
491 RADEON_VM_PAGE_SNOOPED);
492 if (r) {
493 radeon_vm_fini(rdev, &fpriv->vm);
494 kfree(fpriv);
495 return r;
496 }
497
498 file_priv->driver_priv = fpriv;
499 }
500 return 0;
501}
502
503
504
505
506
507
508
509
510
/**
 * radeon_driver_postclose_kms - drm callback for post close
 *
 * @dev: drm dev pointer
 * @file_priv: drm file
 *
 * On device post close, tear down vm on cayman+ (all asics).
 */
void radeon_driver_postclose_kms(struct drm_device *dev,
				 struct drm_file *file_priv)
{
	struct radeon_device *rdev = dev->dev_private;

	/* new gpu have virtual address space support */
	if (rdev->family >= CHIP_CAYMAN && file_priv->driver_priv) {
		struct radeon_fpriv *fpriv = file_priv->driver_priv;
		struct radeon_bo_va *bo_va;
		int r;

		/* unmap the ib pool buffer from this client's VM; the bo
		 * must be reserved around the find/remove */
		r = radeon_bo_reserve(rdev->ring_tmp_bo.bo, false);
		if (!r) {
			bo_va = radeon_vm_bo_find(&fpriv->vm,
						  rdev->ring_tmp_bo.bo);
			if (bo_va)
				radeon_vm_bo_rmv(rdev, bo_va);
			radeon_bo_unreserve(rdev->ring_tmp_bo.bo);
		}

		radeon_vm_fini(rdev, &fpriv->vm);
		kfree(fpriv);
		file_priv->driver_priv = NULL;
	}
}
536
537
538
539
540
541
542
543
544
545
546void radeon_driver_preclose_kms(struct drm_device *dev,
547 struct drm_file *file_priv)
548{
549 struct radeon_device *rdev = dev->dev_private;
550 if (rdev->hyperz_filp == file_priv)
551 rdev->hyperz_filp = NULL;
552 if (rdev->cmask_filp == file_priv)
553 rdev->cmask_filp = NULL;
554 radeon_uvd_free_handles(rdev, file_priv);
555}
556
557
558
559
560
561
562
563
564
565
566
567
568
/**
 * radeon_get_vblank_counter_kms - get frame count
 *
 * @dev: drm dev pointer
 * @crtc: crtc to get the frame count from
 *
 * Gets the frame count on the requested crtc (all asics).
 * Returns frame count on success, -EINVAL on failure.
 */
u32 radeon_get_vblank_counter_kms(struct drm_device *dev, int crtc)
{
	struct radeon_device *rdev = dev->dev_private;

	if (crtc < 0 || crtc >= rdev->num_crtc) {
		DRM_ERROR("Invalid crtc %d\n", crtc);
		/* NOTE(review): return type is u32, so -EINVAL is implicitly
		 * converted to a large positive counter value here — confirm
		 * callers treat this as an error rather than a real count. */
		return -EINVAL;
	}

	return radeon_get_vblank_counter(rdev, crtc);
}
580
581
582
583
584
585
586
587
588
589
590int radeon_enable_vblank_kms(struct drm_device *dev, int crtc)
591{
592 struct radeon_device *rdev = dev->dev_private;
593 unsigned long irqflags;
594 int r;
595
596 if (crtc < 0 || crtc >= rdev->num_crtc) {
597 DRM_ERROR("Invalid crtc %d\n", crtc);
598 return -EINVAL;
599 }
600
601 spin_lock_irqsave(&rdev->irq.lock, irqflags);
602 rdev->irq.crtc_vblank_int[crtc] = true;
603 r = radeon_irq_set(rdev);
604 spin_unlock_irqrestore(&rdev->irq.lock, irqflags);
605 return r;
606}
607
608
609
610
611
612
613
614
615
616void radeon_disable_vblank_kms(struct drm_device *dev, int crtc)
617{
618 struct radeon_device *rdev = dev->dev_private;
619 unsigned long irqflags;
620
621 if (crtc < 0 || crtc >= rdev->num_crtc) {
622 DRM_ERROR("Invalid crtc %d\n", crtc);
623 return;
624 }
625
626 spin_lock_irqsave(&rdev->irq.lock, irqflags);
627 rdev->irq.crtc_vblank_int[crtc] = false;
628 radeon_irq_set(rdev);
629 spin_unlock_irqrestore(&rdev->irq.lock, irqflags);
630}
631
632
633
634
635
636
637
638
639
640
641
642
643
644
/**
 * radeon_get_vblank_timestamp_kms - computes vblank timestamp
 *
 * @dev: drm dev pointer
 * @crtc: crtc to get the timestamp for
 * @max_error: max error
 * @vblank_time: time value
 * @flags: flags passed to the helper
 *
 * Gets the vblank timestamp on the requested crtc (all asics).
 * Returns the result of the DRM core helper, negative on failure.
 */
int radeon_get_vblank_timestamp_kms(struct drm_device *dev, int crtc,
				    int *max_error,
				    struct timeval *vblank_time,
				    unsigned flags)
{
	struct drm_crtc *drmcrtc;
	struct radeon_device *rdev = dev->dev_private;

	/* NOTE(review): bound check uses dev->num_crtcs here while the
	 * other callbacks use rdev->num_crtc — presumably equal; verify. */
	if (crtc < 0 || crtc >= dev->num_crtcs) {
		DRM_ERROR("Invalid crtc %d\n", crtc);
		return -EINVAL;
	}

	/* Get associated drm_crtc: */
	drmcrtc = &rdev->mode_info.crtcs[crtc]->base;

	/* Helper routine in DRM core does all the work: */
	return drm_calc_vbltimestamp_from_scanoutpos(dev, crtc, max_error,
						     vblank_time, flags,
						     drmcrtc);
}
666
667
668
669
/*
 * radeon_dma_ioctl_kms - legacy DMA ioctl entry point.
 *
 * The legacy DMA path is not valid with KMS; always fails with -EINVAL.
 */
int radeon_dma_ioctl_kms(struct drm_device *dev, void *data,
			 struct drm_file *file_priv)
{
	/* Not valid in KMS. */
	return -EINVAL;
}
676
/* Generates a stub ioctl handler that logs an error and returns -EINVAL;
 * used below for all legacy (UMS) ioctls that are meaningless with KMS. */
#define KMS_INVALID_IOCTL(name) \
int name(struct drm_device *dev, void *data, struct drm_file *file_priv)\
{ \
	DRM_ERROR("invalid ioctl with kms %s\n", __func__); \
	return -EINVAL; \
}
683
684
685
686
/*
 * All these ioctls are invalid in kms world.
 */
KMS_INVALID_IOCTL(radeon_cp_init_kms)
KMS_INVALID_IOCTL(radeon_cp_start_kms)
KMS_INVALID_IOCTL(radeon_cp_stop_kms)
KMS_INVALID_IOCTL(radeon_cp_reset_kms)
KMS_INVALID_IOCTL(radeon_cp_idle_kms)
KMS_INVALID_IOCTL(radeon_cp_resume_kms)
KMS_INVALID_IOCTL(radeon_engine_reset_kms)
KMS_INVALID_IOCTL(radeon_fullscreen_kms)
KMS_INVALID_IOCTL(radeon_cp_swap_kms)
KMS_INVALID_IOCTL(radeon_cp_clear_kms)
KMS_INVALID_IOCTL(radeon_cp_vertex_kms)
KMS_INVALID_IOCTL(radeon_cp_indices_kms)
KMS_INVALID_IOCTL(radeon_cp_texture_kms)
KMS_INVALID_IOCTL(radeon_cp_stipple_kms)
KMS_INVALID_IOCTL(radeon_cp_indirect_kms)
KMS_INVALID_IOCTL(radeon_cp_vertex2_kms)
KMS_INVALID_IOCTL(radeon_cp_cmdbuf_kms)
KMS_INVALID_IOCTL(radeon_cp_getparam_kms)
KMS_INVALID_IOCTL(radeon_cp_flip_kms)
KMS_INVALID_IOCTL(radeon_mem_alloc_kms)
KMS_INVALID_IOCTL(radeon_mem_free_kms)
KMS_INVALID_IOCTL(radeon_mem_init_heap_kms)
KMS_INVALID_IOCTL(radeon_irq_emit_kms)
KMS_INVALID_IOCTL(radeon_irq_wait_kms)
KMS_INVALID_IOCTL(radeon_cp_setparam_kms)
KMS_INVALID_IOCTL(radeon_surface_alloc_kms)
KMS_INVALID_IOCTL(radeon_surface_free_kms)
714
715
/* Ioctl table: the first section maps every legacy (UMS) ioctl to the
 * -EINVAL stubs generated above; the second section holds the real
 * GEM/CS/INFO handlers used under KMS. */
struct drm_ioctl_desc radeon_ioctls_kms[] = {
	DRM_IOCTL_DEF_DRV(RADEON_CP_INIT, radeon_cp_init_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(RADEON_CP_START, radeon_cp_start_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(RADEON_CP_STOP, radeon_cp_stop_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(RADEON_CP_RESET, radeon_cp_reset_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(RADEON_CP_IDLE, radeon_cp_idle_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_CP_RESUME, radeon_cp_resume_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_RESET, radeon_engine_reset_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_FULLSCREEN, radeon_fullscreen_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_SWAP, radeon_cp_swap_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_CLEAR, radeon_cp_clear_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_VERTEX, radeon_cp_vertex_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_INDICES, radeon_cp_indices_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_TEXTURE, radeon_cp_texture_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_STIPPLE, radeon_cp_stipple_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_INDIRECT, radeon_cp_indirect_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(RADEON_VERTEX2, radeon_cp_vertex2_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_CMDBUF, radeon_cp_cmdbuf_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_GETPARAM, radeon_cp_getparam_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_FLIP, radeon_cp_flip_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_ALLOC, radeon_mem_alloc_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_FREE, radeon_mem_free_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_INIT_HEAP, radeon_mem_init_heap_kms, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(RADEON_IRQ_EMIT, radeon_irq_emit_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_IRQ_WAIT, radeon_irq_wait_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_SETPARAM, radeon_cp_setparam_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_SURF_ALLOC, radeon_surface_alloc_kms, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(RADEON_SURF_FREE, radeon_surface_free_kms, DRM_AUTH),
	/* KMS */
	DRM_IOCTL_DEF_DRV(RADEON_GEM_INFO, radeon_gem_info_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_GEM_CREATE, radeon_gem_create_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_GEM_MMAP, radeon_gem_mmap_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_GEM_SET_DOMAIN, radeon_gem_set_domain_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_GEM_PREAD, radeon_gem_pread_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_GEM_PWRITE, radeon_gem_pwrite_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_GEM_WAIT_IDLE, radeon_gem_wait_idle_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_CS, radeon_cs_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_INFO, radeon_info_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_GEM_SET_TILING, radeon_gem_set_tiling_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_GEM_GET_TILING, radeon_gem_get_tiling_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_GEM_BUSY, radeon_gem_busy_ioctl, DRM_AUTH|DRM_UNLOCKED),
	DRM_IOCTL_DEF_DRV(RADEON_GEM_VA, radeon_gem_va_ioctl, DRM_AUTH|DRM_UNLOCKED),
};
/* number of entries in the table, exported for the drm_driver struct */
int radeon_max_kms_ioctl = DRM_ARRAY_SIZE(radeon_ioctls_kms);
760