/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */

#include <linux/firmware.h>
#include <linux/module.h>
#include <linux/pci.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include <drm/drm_debugfs.h>
#include <drm/drm_device.h>
#include <drm/drm_vblank.h>
#include <drm/radeon_drm.h>

#include "atom.h"
#include "avivod.h"
#include "evergreen.h"
#include "r600.h"
#include "r600d.h"
#include "rv770.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_audio.h"
#include "radeon_mode.h"
#include "radeon_ucode.h"

/* Firmware Names */
MODULE_FIRMWARE("radeon/R600_pfp.bin");
MODULE_FIRMWARE("radeon/R600_me.bin");
MODULE_FIRMWARE("radeon/RV610_pfp.bin");
MODULE_FIRMWARE("radeon/RV610_me.bin");
MODULE_FIRMWARE("radeon/RV630_pfp.bin");
MODULE_FIRMWARE("radeon/RV630_me.bin");
MODULE_FIRMWARE("radeon/RV620_pfp.bin");
MODULE_FIRMWARE("radeon/RV620_me.bin");
MODULE_FIRMWARE("radeon/RV635_pfp.bin");
MODULE_FIRMWARE("radeon/RV635_me.bin");
MODULE_FIRMWARE("radeon/RV670_pfp.bin");
MODULE_FIRMWARE("radeon/RV670_me.bin");
MODULE_FIRMWARE("radeon/RS780_pfp.bin");
MODULE_FIRMWARE("radeon/RS780_me.bin");
MODULE_FIRMWARE("radeon/RV770_pfp.bin");
MODULE_FIRMWARE("radeon/RV770_me.bin");
MODULE_FIRMWARE("radeon/RV770_smc.bin");
MODULE_FIRMWARE("radeon/RV730_pfp.bin");
MODULE_FIRMWARE("radeon/RV730_me.bin");
MODULE_FIRMWARE("radeon/RV730_smc.bin");
MODULE_FIRMWARE("radeon/RV740_smc.bin");
MODULE_FIRMWARE("radeon/RV710_pfp.bin");
MODULE_FIRMWARE("radeon/RV710_me.bin");
MODULE_FIRMWARE("radeon/RV710_smc.bin");
MODULE_FIRMWARE("radeon/R600_rlc.bin");
MODULE_FIRMWARE("radeon/R700_rlc.bin");
MODULE_FIRMWARE("radeon/CEDAR_pfp.bin");
MODULE_FIRMWARE("radeon/CEDAR_me.bin");
MODULE_FIRMWARE("radeon/CEDAR_rlc.bin");
MODULE_FIRMWARE("radeon/CEDAR_smc.bin");
MODULE_FIRMWARE("radeon/REDWOOD_pfp.bin");
MODULE_FIRMWARE("radeon/REDWOOD_me.bin");
MODULE_FIRMWARE("radeon/REDWOOD_rlc.bin");
MODULE_FIRMWARE("radeon/REDWOOD_smc.bin");
MODULE_FIRMWARE("radeon/JUNIPER_pfp.bin");
MODULE_FIRMWARE("radeon/JUNIPER_me.bin");
MODULE_FIRMWARE("radeon/JUNIPER_rlc.bin");
MODULE_FIRMWARE("radeon/JUNIPER_smc.bin");
MODULE_FIRMWARE("radeon/CYPRESS_pfp.bin");
MODULE_FIRMWARE("radeon/CYPRESS_me.bin");
MODULE_FIRMWARE("radeon/CYPRESS_rlc.bin");
MODULE_FIRMWARE("radeon/CYPRESS_smc.bin");
MODULE_FIRMWARE("radeon/PALM_pfp.bin");
MODULE_FIRMWARE("radeon/PALM_me.bin");
MODULE_FIRMWARE("radeon/SUMO_rlc.bin");
MODULE_FIRMWARE("radeon/SUMO_pfp.bin");
MODULE_FIRMWARE("radeon/SUMO_me.bin");
MODULE_FIRMWARE("radeon/SUMO2_pfp.bin");
MODULE_FIRMWARE("radeon/SUMO2_me.bin");

static const u32 crtc_offsets[2] =
{
	0,
	AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL
};

int r600_debugfs_mc_info_init(struct radeon_device *rdev);

/* r600,rv610,rv630,rv620,rv635,rv670 */
int r600_mc_wait_for_idle(struct radeon_device *rdev);
static void r600_gpu_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
void r600_irq_disable(struct radeon_device *rdev);
static void r600_pcie_gen2_enable(struct radeon_device *rdev);

/*
 * Indirect registers accessor
 */
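/*
 * All of the accessors below funnel through an index/data register
 * pair: the register offset is written to an INDEX register, then the
 * value is read from or written to the matching DATA register.  The
 * spinlock is held across both operations so concurrent callers cannot
 * clobber each other's index selection.
 */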
u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->rcu_idx_lock, flags);
	WREG32(R600_RCU_INDEX, ((reg) & 0x1fff));
	r = RREG32(R600_RCU_DATA);
	spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags);
	return r;
}

void r600_rcu_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->rcu_idx_lock, flags);
	WREG32(R600_RCU_INDEX, ((reg) & 0x1fff));
	WREG32(R600_RCU_DATA, (v));
	spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags);
}

u32 r600_uvd_ctx_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->uvd_idx_lock, flags);
	WREG32(R600_UVD_CTX_INDEX, ((reg) & 0x1ff));
	r = RREG32(R600_UVD_CTX_DATA);
	spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags);
	return r;
}

void r600_uvd_ctx_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->uvd_idx_lock, flags);
	WREG32(R600_UVD_CTX_INDEX, ((reg) & 0x1ff));
	WREG32(R600_UVD_CTX_DATA, (v));
	spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags);
}

/**
 * r600_get_allowed_info_register - fetch the register for the info ioctl
 *
 * @rdev: radeon_device pointer
 * @reg: register offset in bytes
 * @val: register value
 *
 * Returns 0 for success or -EINVAL for an invalid register
 *
 */
int r600_get_allowed_info_register(struct radeon_device *rdev,
				   u32 reg, u32 *val)
{
	switch (reg) {
	case GRBM_STATUS:
	case GRBM_STATUS2:
	case R_000E50_SRBM_STATUS:
	case DMA_STATUS_REG:
	case UVD_STATUS:
		*val = RREG32(reg);
		return 0;
	default:
		return -EINVAL;
	}
}

/**
 * r600_get_xclk - get the xclk
 *
 * @rdev: radeon_device pointer
 *
 * Returns the reference clock used by the gfx engine
 * (r6xx, IGPs, APUs).
 */
u32 r600_get_xclk(struct radeon_device *rdev)
{
	return rdev->clock.spll.reference_freq;
}
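
/*
 * Reprogramming the UPLL below follows the usual PLL recipe: switch
 * VCLK/DCLK to bypass, assert reset, program the reference, feedback
 * and post dividers, release reset and give the PLL time to settle,
 * then switch the clock sources back to the PLL output.  Requesting
 * vclk == 0 or dclk == 0 simply leaves the PLL in bypass and puts it
 * to sleep.
 */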
int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	unsigned fb_div = 0, ref_div, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* assert BYPASS_EN, deassert UPLL_RESET, UPLL_SLEEP and UPLL_CTLREQ */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~(
		 UPLL_RESET_MASK | UPLL_SLEEP_MASK | UPLL_CTLREQ_MASK));

	if (rdev->family >= CHIP_RS780)
		WREG32_P(GFX_MACRO_BYPASS_CNTL, UPLL_BYPASS_CNTL,
			 ~UPLL_BYPASS_CNTL);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	if (rdev->clock.spll.reference_freq == 10000)
		ref_div = 34;
	else
		ref_div = 4;

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000,
					  ref_div + 1, 0xFFF, 2, 30, ~0,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	if (rdev->family >= CHIP_RV670 && rdev->family < CHIP_RS780)
		fb_div >>= 1;
	else
		fb_div |= 1;

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* For RS780 we have to choose ref clk */
	if (rdev->family >= CHIP_RS780)
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_REFCLK_SRC_SEL_MASK,
			 ~UPLL_REFCLK_SRC_SEL_MASK);

	/* set the required fb, ref and post divider values */
	WREG32_P(CG_UPLL_FUNC_CNTL,
		 UPLL_FB_DIV(fb_div) |
		 UPLL_REF_DIV(ref_div),
		 ~(UPLL_FB_DIV_MASK | UPLL_REF_DIV_MASK));
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 UPLL_SW_HILEN(vclk_div >> 1) |
		 UPLL_SW_LOLEN((vclk_div >> 1) + (vclk_div & 1)) |
		 UPLL_SW_HILEN2(dclk_div >> 1) |
		 UPLL_SW_LOLEN2((dclk_div >> 1) + (dclk_div & 1)) |
		 UPLL_DIVEN_MASK | UPLL_DIVEN2_MASK,
		 ~UPLL_SW_MASK);

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* deassert BYPASS EN */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	if (rdev->family >= CHIP_RS780)
		WREG32_P(GFX_MACRO_BYPASS_CNTL, 0, ~UPLL_BYPASS_CNTL);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}

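/*
 * dce3_program_fmt() configures the FMT block of the display pipe
 * feeding this encoder.  Depending on the sink's bit depth, the FMT
 * block either truncates or spatially dithers the pipe output down to
 * 6 or 8 bits per channel; at 10 bpc (or unknown bpc) no conversion is
 * programmed.
 */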
void dce3_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= FMT_SPATIAL_DITHER_EN;
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}

/* get temperature in millidegrees */
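/*
 * CG_THERMAL_STATUS reports the sensor as a 9-bit two's-complement
 * value in degrees C.  For example, a raw field of 0x1F0 decodes as
 * 0xF0 - 256 = -16 C and is returned below as -16000 millidegrees.
 */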
int rv6xx_get_temp(struct radeon_device *rdev)
{
	u32 temp = (RREG32(CG_THERMAL_STATUS) & ASIC_T_MASK) >>
		ASIC_T_SHIFT;
	int actual_temp = temp & 0xff;

	if (temp & 0x100)
		actual_temp -= 256;

	return actual_temp * 1000;
}

void r600_pm_get_dynpm_state(struct radeon_device *rdev)
{
	int i;

	rdev->pm.dynpm_can_upclock = true;
	rdev->pm.dynpm_can_downclock = true;

	/* power state array is low to high, default is first (0) */
	if ((rdev->flags & RADEON_IS_IGP) || (rdev->family == CHIP_R600)) {
		int min_power_state_index = 0;

		if (rdev->pm.num_power_states > 2)
			min_power_state_index = 1;

		switch (rdev->pm.dynpm_planned_action) {
		case DYNPM_ACTION_MINIMUM:
			rdev->pm.requested_power_state_index = min_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_downclock = false;
			break;
		case DYNPM_ACTION_DOWNCLOCK:
			if (rdev->pm.current_power_state_index == min_power_state_index) {
				rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
				rdev->pm.dynpm_can_downclock = false;
			} else {
				if (rdev->pm.active_crtc_count > 1) {
					for (i = 0; i < rdev->pm.num_power_states; i++) {
						if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
							continue;
						else if (i >= rdev->pm.current_power_state_index) {
							rdev->pm.requested_power_state_index =
								rdev->pm.current_power_state_index;
							break;
						} else {
							rdev->pm.requested_power_state_index = i;
							break;
						}
					}
				} else {
					if (rdev->pm.current_power_state_index == 0)
						rdev->pm.requested_power_state_index =
							rdev->pm.num_power_states - 1;
					else
						rdev->pm.requested_power_state_index =
							rdev->pm.current_power_state_index - 1;
				}
			}
			rdev->pm.requested_clock_mode_index = 0;
			/* don't use the power state if crtcs are active and no display flag is set */
			if ((rdev->pm.active_crtc_count > 0) &&
			    (rdev->pm.power_state[rdev->pm.requested_power_state_index].
			     clock_info[rdev->pm.requested_clock_mode_index].flags &
			     RADEON_PM_MODE_NO_DISPLAY)) {
				rdev->pm.requested_power_state_index++;
			}
			break;
		case DYNPM_ACTION_UPCLOCK:
			if (rdev->pm.current_power_state_index == (rdev->pm.num_power_states - 1)) {
				rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
				rdev->pm.dynpm_can_upclock = false;
			} else {
				if (rdev->pm.active_crtc_count > 1) {
					for (i = (rdev->pm.num_power_states - 1); i >= 0; i--) {
						if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
							continue;
						else if (i <= rdev->pm.current_power_state_index) {
							rdev->pm.requested_power_state_index =
								rdev->pm.current_power_state_index;
							break;
						} else {
							rdev->pm.requested_power_state_index = i;
							break;
						}
					}
				} else
					rdev->pm.requested_power_state_index =
						rdev->pm.current_power_state_index + 1;
			}
			rdev->pm.requested_clock_mode_index = 0;
			break;
		case DYNPM_ACTION_DEFAULT:
			rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_upclock = false;
			break;
		case DYNPM_ACTION_NONE:
		default:
			DRM_ERROR("Requested mode for undefined action\n");
			return;
		}
	} else {
		/* XXX select a power state based on AC/DC, single/dualhead, etc. */
		/* for now just select the first power state and switch between clock modes */
		/* power state array is low to high, default is first (0) */
		if (rdev->pm.active_crtc_count > 1) {
			rdev->pm.requested_power_state_index = -1;
			/* start at 1 as we're sure 0 is default */
			for (i = 1; i < rdev->pm.num_power_states; i++) {
				if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
					continue;
				else if ((rdev->pm.power_state[i].type == POWER_STATE_TYPE_PERFORMANCE) ||
					 (rdev->pm.power_state[i].type == POWER_STATE_TYPE_BATTERY)) {
					rdev->pm.requested_power_state_index = i;
					break;
				}
			}
			/* if nothing selected, grab the default state. */
			if (rdev->pm.requested_power_state_index == -1)
				rdev->pm.requested_power_state_index = 0;
		} else
			rdev->pm.requested_power_state_index = 1;

		switch (rdev->pm.dynpm_planned_action) {
		case DYNPM_ACTION_MINIMUM:
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_downclock = false;
			break;
		case DYNPM_ACTION_DOWNCLOCK:
			if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
				if (rdev->pm.current_clock_mode_index == 0) {
					rdev->pm.requested_clock_mode_index = 0;
					rdev->pm.dynpm_can_downclock = false;
				} else
					rdev->pm.requested_clock_mode_index =
						rdev->pm.current_clock_mode_index - 1;
			} else {
				rdev->pm.requested_clock_mode_index = 0;
				rdev->pm.dynpm_can_downclock = false;
			}
			/* don't use the power state if crtcs are active and no display flag is set */
			if ((rdev->pm.active_crtc_count > 0) &&
			    (rdev->pm.power_state[rdev->pm.requested_power_state_index].
			     clock_info[rdev->pm.requested_clock_mode_index].flags &
			     RADEON_PM_MODE_NO_DISPLAY)) {
				rdev->pm.requested_clock_mode_index++;
			}
			break;
		case DYNPM_ACTION_UPCLOCK:
			if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
				if (rdev->pm.current_clock_mode_index ==
				    (rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1)) {
					rdev->pm.requested_clock_mode_index = rdev->pm.current_clock_mode_index;
					rdev->pm.dynpm_can_upclock = false;
				} else
					rdev->pm.requested_clock_mode_index =
						rdev->pm.current_clock_mode_index + 1;
			} else {
				rdev->pm.requested_clock_mode_index =
					rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1;
				rdev->pm.dynpm_can_upclock = false;
			}
			break;
		case DYNPM_ACTION_DEFAULT:
			rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_upclock = false;
			break;
		case DYNPM_ACTION_NONE:
		default:
			DRM_ERROR("Requested mode for undefined action\n");
			return;
		}
	}

	DRM_DEBUG_DRIVER("Requested: e: %d m: %d p: %d\n",
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 clock_info[rdev->pm.requested_clock_mode_index].sclk,
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 clock_info[rdev->pm.requested_clock_mode_index].mclk,
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 pcie_lanes);
}

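/*
 * The profile tables below map each PM profile (default, low/mid/high,
 * in single-head "sh" and multi-head "mh" variants) to a power state
 * index and a clock mode index for both the dpms-off and dpms-on
 * cases.  Boards expose two, three or more power states, so the
 * indices are picked per layout.
 */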
void rs780_pm_init_profile(struct radeon_device *rdev)
{
	if (rdev->pm.num_power_states == 2) {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else if (rdev->pm.num_power_states == 3) {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 3;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 3;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	}
}

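/*
 * Note that for CHIP_R600 every profile below points at the default
 * power state (the original code marks this with an XXX); only the
 * other r6xx/r7xx paths pick battery or performance states via
 * radeon_pm_get_type_index().
 */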
void r600_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	if (rdev->family == CHIP_R600) {
		/* XXX */
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else {
		if (rdev->pm.num_power_states < 4) {
			/* default */
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
			/* low sh */
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
			/* mid sh */
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
			/* high sh */
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
			/* low mh */
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
			/* mid mh */
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
			/* high mh */
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
		} else {
			/* default */
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
			/* low sh */
			if (rdev->flags & RADEON_IS_MOBILITY)
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
			else
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
			/* mid sh */
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
			/* high sh */
			idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
			/* low mh */
			if (rdev->flags & RADEON_IS_MOBILITY)
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1);
			else
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
			/* mid mh */
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
			/* high mh */
			idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
		}
	}
}

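/*
 * Apply the non-clock parts of a power state change.  For r600 that is
 * only the VDDC voltage: VOLTAGE_SW states carry a target voltage that
 * is forwarded to the ATOM tables whenever it differs from the cached
 * rdev->pm.current_vddc value.
 */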
void r600_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
		/* 0xff01 is a flag rather than an actual voltage */
		if (voltage->voltage == 0xff01)
			return;
		if (voltage->voltage != rdev->pm.current_vddc) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG_DRIVER("Setting: v: %d\n", voltage->voltage);
		}
	}
}

bool r600_gui_idle(struct radeon_device *rdev)
{
	if (RREG32(GRBM_STATUS) & GUI_ACTIVE)
		return false;
	else
		return true;
}

/* hpd for digital panel detection */
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	if (ASIC_IS_DCE3(rdev)) {
		switch (hpd) {
		case RADEON_HPD_1:
			if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_2:
			if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_3:
			if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_4:
			if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
			/* DCE 3.2 */
		case RADEON_HPD_5:
			if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_6:
			if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		default:
			break;
		}
	} else {
		switch (hpd) {
		case RADEON_HPD_1:
			if (RREG32(DC_HOT_PLUG_DETECT1_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_2:
			if (RREG32(DC_HOT_PLUG_DETECT2_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_3:
			if (RREG32(DC_HOT_PLUG_DETECT3_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		default:
			break;
		}
	}
	return connected;
}

void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = r600_hpd_sense(rdev, hpd);

	if (ASIC_IS_DCE3(rdev)) {
		switch (hpd) {
		case RADEON_HPD_1:
			tmp = RREG32(DC_HPD1_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD1_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			tmp = RREG32(DC_HPD2_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD2_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			tmp = RREG32(DC_HPD3_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD3_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_4:
			tmp = RREG32(DC_HPD4_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD4_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_5:
			tmp = RREG32(DC_HPD5_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD5_INT_CONTROL, tmp);
			break;
			/* DCE 3.2 */
		case RADEON_HPD_6:
			tmp = RREG32(DC_HPD6_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD6_INT_CONTROL, tmp);
			break;
		default:
			break;
		}
	} else {
		switch (hpd) {
		case RADEON_HPD_1:
			tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
			break;
		default:
			break;
		}
	}
}

void r600_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned enable = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);

		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
			/* don't try to enable hpd on eDP or LVDS to avoid breaking
			 * the aux dp channel on imac and help (but not completely fix)
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
			 * also avoid interrupt storms during dpms.
			 */
			continue;
		}
		if (ASIC_IS_DCE3(rdev)) {
			u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) | DC_HPDx_RX_INT_TIMER(0xfa);
			if (ASIC_IS_DCE32(rdev))
				tmp |= DC_HPDx_EN;

			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HPD1_CONTROL, tmp);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HPD2_CONTROL, tmp);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HPD3_CONTROL, tmp);
				break;
			case RADEON_HPD_4:
				WREG32(DC_HPD4_CONTROL, tmp);
				break;
				/* DCE 3.2 */
			case RADEON_HPD_5:
				WREG32(DC_HPD5_CONTROL, tmp);
				break;
			case RADEON_HPD_6:
				WREG32(DC_HPD6_CONTROL, tmp);
				break;
			default:
				break;
			}
		} else {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HOT_PLUG_DETECT1_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HOT_PLUG_DETECT2_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HOT_PLUG_DETECT3_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			default:
				break;
			}
		}
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			enable |= 1 << radeon_connector->hpd.hpd;
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
	}
	radeon_irq_kms_enable_hpd(rdev, enable);
}

void r600_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned disable = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		if (ASIC_IS_DCE3(rdev)) {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HPD1_CONTROL, 0);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HPD2_CONTROL, 0);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HPD3_CONTROL, 0);
				break;
			case RADEON_HPD_4:
				WREG32(DC_HPD4_CONTROL, 0);
				break;
				/* DCE 3.2 */
			case RADEON_HPD_5:
				WREG32(DC_HPD5_CONTROL, 0);
				break;
			case RADEON_HPD_6:
				WREG32(DC_HPD6_CONTROL, 0);
				break;
			default:
				break;
			}
		} else {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HOT_PLUG_DETECT1_CONTROL, 0);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HOT_PLUG_DETECT2_CONTROL, 0);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HOT_PLUG_DETECT3_CONTROL, 0);
				break;
			default:
				break;
			}
		}
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			disable |= 1 << radeon_connector->hpd.hpd;
	}
	radeon_irq_kms_disable_hpd(rdev, disable);
}

/*
 * R600 PCIE GART
 */
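/*
 * The PCIE GART is the GPU-side page table that maps system memory
 * pages into the GPU address space.  Context 0 below covers the GTT
 * aperture; the table itself lives in VRAM, and updates must be
 * followed by a TLB flush so the MC drops stale translations.
 */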
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush hdp cache so updates hit vram */
	if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
	    !(rdev->flags & RADEON_IS_AGP)) {
		void __iomem *ptr = (void *)rdev->gart.ptr;

		/* r7xx hw bug.  write to HDP_DEBUG1 followed by fb read
		 * rather than write to HDP_REG_COHERENCY_FLUSH_CNTL.
		 * This seems to cause problems on some AGP cards. Just use the old
		 * method for them.
		 */
		WREG32(HDP_DEBUG1, 0);
		readl((void __iomem *)ptr);
	} else
		WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_INVALIDATION_LOW_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_INVALIDATION_HIGH_ADDR, (rdev->mc.gtt_end - 1) >> 12);
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			pr_warn("[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}

int r600_pcie_gart_init(struct radeon_device *rdev)
{
	int r;

	if (rdev->gart.robj) {
		WARN(1, "R600 PCIE GART already initialized\n");
		return 0;
	}
	/* Initialize common gart structure */
	r = radeon_gart_init(rdev);
	if (r)
		return r;
	rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;
	return radeon_gart_table_vram_alloc(rdev);
}

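/*
 * Enabling the GART: pin the page table in VRAM, configure the L2
 * cache and the per-client L1 TLBs, point context 0 at the table and
 * the GTT address range, route faults to the dummy page, and finish
 * with a TLB flush.
 */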
static int r600_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r, i;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
		ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp | ENABLE_L1_STRICT_ORDERING);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_UVD_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_UVD_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	for (i = 1; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	r600_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}

static void r600_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Disable all tables */
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	/* Disable L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup L1 TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
	      ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_UVD_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_UVD_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}

static void r600_pcie_gart_fini(struct radeon_device *rdev)
{
	radeon_gart_fini(rdev);
	r600_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
}

static void r600_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
		ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp | ENABLE_L1_STRICT_ORDERING);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

int r600_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(R_000E50_SRBM_STATUS) & 0x3F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg)
{
	unsigned long flags;
	uint32_t r;

	spin_lock_irqsave(&rdev->mc_idx_lock, flags);
	WREG32(R_0028F8_MC_INDEX, S_0028F8_MC_IND_ADDR(reg));
	r = RREG32(R_0028FC_MC_DATA);
	WREG32(R_0028F8_MC_INDEX, ~C_0028F8_MC_IND_ADDR);
	spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
	return r;
}

void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->mc_idx_lock, flags);
	WREG32(R_0028F8_MC_INDEX, S_0028F8_MC_IND_ADDR(reg) |
		S_0028F8_MC_IND_WR_EN(1));
	WREG32(R_0028FC_MC_DATA, v);
	WREG32(R_0028F8_MC_INDEX, 0x7F);
	spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
}

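/*
 * r600_mc_program() reprograms the memory controller apertures (system
 * aperture, FB location, AGP window).  The MC has to be idle and VGA
 * memory access disabled while the apertures move, hence the
 * rv515_mc_stop()/rv515_mc_resume() bracket around the register
 * writes.
 */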
static void r600_mc_program(struct radeon_device *rdev)
{
	struct rv515_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	rv515_mc_stop(rdev, &save);
	if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture (doesn't exist before R600) */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 22);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 22);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	rv515_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}

/**
 * r600_vram_gtt_location - try to find VRAM & GTT location
 * @rdev: radeon device structure holding all necessary information
 * @mc: memory controller structure holding memory information
 *
 * On AGP boards, place VRAM adjacent to the AGP aperture (before or
 * after it, whichever gap is larger), limiting the reported VRAM size
 * to what fits, since only one kind of mapping can be visible in the
 * CPU (PCI) address space at a time.
 *
 * On PCIE boards, fall back to the common radeon_vram_location()/
 * radeon_gtt_location() helpers; IGPs additionally anchor VRAM at the
 * address already programmed in MC_VM_FB_LOCATION.
 *
 * VRAM above 0xE0000000 bytes is clamped so at least 512M of GTT still
 * fits below the MC's 32-bit addressable limit.
 */
static void r600_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
{
	u64 size_bf, size_af;

	if (mc->mc_vram_size > 0xE0000000) {
		/* leave room for at least 512M GTT */
		dev_warn(rdev->dev, "limiting VRAM\n");
		mc->real_vram_size = 0xE0000000;
		mc->mc_vram_size = 0xE0000000;
	}
	if (rdev->flags & RADEON_IS_AGP) {
		size_bf = mc->gtt_start;
		size_af = mc->mc_mask - mc->gtt_end;
		if (size_bf > size_af) {
			if (mc->mc_vram_size > size_bf) {
				dev_warn(rdev->dev, "limiting VRAM\n");
				mc->real_vram_size = size_bf;
				mc->mc_vram_size = size_bf;
			}
			mc->vram_start = mc->gtt_start - mc->mc_vram_size;
		} else {
			if (mc->mc_vram_size > size_af) {
				dev_warn(rdev->dev, "limiting VRAM\n");
				mc->real_vram_size = size_af;
				mc->mc_vram_size = size_af;
			}
			mc->vram_start = mc->gtt_end + 1;
		}
		mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
		dev_info(rdev->dev, "VRAM: %lluM 0x%08llX - 0x%08llX (%lluM used)\n",
				mc->mc_vram_size >> 20, mc->vram_start,
				mc->vram_end, mc->real_vram_size >> 20);
	} else {
		u64 base = 0;
		if (rdev->flags & RADEON_IS_IGP) {
			base = RREG32(MC_VM_FB_LOCATION) & 0xFFFF;
			base <<= 24;
		}
		radeon_vram_location(rdev, &rdev->mc, base);
		rdev->mc.gtt_base_align = 0;
		radeon_gtt_location(rdev, mc);
	}
}

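/*
 * The VRAM width below is derived from two registers: RAMCFG gives the
 * channel size (16, 32 or 64 bits) and CHMAP the number of channels
 * (1/2/4/8), so e.g. two 64-bit channels report a 128-bit interface.
 */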
static int r600_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;
	uint32_t h_addr, l_addr;
	unsigned long long k8_addr;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r600_vram_gtt_location(rdev, &rdev->mc);

	if (rdev->flags & RADEON_IS_IGP) {
		rs690_pm_info(rdev);
		rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);

		if (rdev->family == CHIP_RS780 || rdev->family == CHIP_RS880) {
			/* Use K8 direct mapping for fast fb access. */
			rdev->fastfb_working = false;
			h_addr = G_000012_K8_ADDR_EXT(RREG32_MC(R_000012_MC_MISC_UMA_CNTL));
			l_addr = RREG32_MC(R_000011_K8_FB_LOCATION);
			k8_addr = ((unsigned long long)h_addr) << 32 | l_addr;
#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_PAE)
			if (k8_addr + rdev->mc.visible_vram_size < 0x100000000ULL)
#endif
			{
				/* FastFB shall be used with UMA memory. Here it is simply
				 * disabled when sideport memory is present.
				 */
				if (rdev->mc.igp_sideport_enabled == false && radeon_fastfb == 1) {
					DRM_INFO("Direct mapping: aper base at 0x%llx, replaced by direct mapping base 0x%llx.\n",
						 (unsigned long long)rdev->mc.aper_base, k8_addr);
					rdev->mc.aper_base = (resource_size_t)k8_addr;
					rdev->fastfb_working = true;
				}
			}
		}
	}

	radeon_update_bandwidth_info(rdev);
	return 0;
}

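/*
 * The VRAM scratch object is a single pinned GPU page; the MC's system
 * aperture default address points at it (see r600_mc_program()), which
 * gives stray accesses a harmless landing spot.
 */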
int r600_vram_scratch_init(struct radeon_device *rdev)
{
	int r;

	if (rdev->vram_scratch.robj == NULL) {
		r = radeon_bo_create(rdev, RADEON_GPU_PAGE_SIZE,
				     PAGE_SIZE, true, RADEON_GEM_DOMAIN_VRAM,
				     0, NULL, NULL, &rdev->vram_scratch.robj);
		if (r) {
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->vram_scratch.robj,
			  RADEON_GEM_DOMAIN_VRAM, &rdev->vram_scratch.gpu_addr);
	if (r) {
		radeon_bo_unreserve(rdev->vram_scratch.robj);
		return r;
	}
	r = radeon_bo_kmap(rdev->vram_scratch.robj,
			   (void **)&rdev->vram_scratch.ptr);
	if (r)
		radeon_bo_unpin(rdev->vram_scratch.robj);
	radeon_bo_unreserve(rdev->vram_scratch.robj);

	return r;
}

void r600_vram_scratch_fini(struct radeon_device *rdev)
{
	int r;

	if (rdev->vram_scratch.robj == NULL) {
		return;
	}
	r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
	if (likely(r == 0)) {
		radeon_bo_kunmap(rdev->vram_scratch.robj);
		radeon_bo_unpin(rdev->vram_scratch.robj);
		radeon_bo_unreserve(rdev->vram_scratch.robj);
	}
	radeon_bo_unref(&rdev->vram_scratch.robj);
}

void r600_set_bios_scratch_engine_hung(struct radeon_device *rdev, bool hung)
{
	u32 tmp = RREG32(R600_BIOS_3_SCRATCH);

	if (hung)
		tmp |= ATOM_S3_ASIC_GUI_ENGINE_HUNG;
	else
		tmp &= ~ATOM_S3_ASIC_GUI_ENGINE_HUNG;

	WREG32(R600_BIOS_3_SCRATCH, tmp);
}

static void r600_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  R_008010_GRBM_STATUS      = 0x%08X\n",
		 RREG32(R_008010_GRBM_STATUS));
	dev_info(rdev->dev, "  R_008014_GRBM_STATUS2     = 0x%08X\n",
		 RREG32(R_008014_GRBM_STATUS2));
	dev_info(rdev->dev, "  R_000E50_SRBM_STATUS      = 0x%08X\n",
		 RREG32(R_000E50_SRBM_STATUS));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		 RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		 RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		 RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		 RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		 RREG32(DMA_STATUS_REG));
}

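/*
 * A CRTC is considered hung when its horizontal/vertical counter stops
 * moving: the counts of all enabled CRTCs are sampled, then re-sampled
 * up to ten times at 100us intervals, dropping any CRTC whose count
 * changed.  Only if every enabled CRTC stays frozen is the display
 * reported hung.
 */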
static bool r600_is_display_hung(struct radeon_device *rdev)
{
	u32 crtc_hung = 0;
	u32 crtc_status[2];
	u32 i, j, tmp;

	for (i = 0; i < rdev->num_crtc; i++) {
		if (RREG32(AVIVO_D1CRTC_CONTROL + crtc_offsets[i]) & AVIVO_CRTC_EN) {
			crtc_status[i] = RREG32(AVIVO_D1CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
			crtc_hung |= (1 << i);
		}
	}

	for (j = 0; j < 10; j++) {
		for (i = 0; i < rdev->num_crtc; i++) {
			if (crtc_hung & (1 << i)) {
				tmp = RREG32(AVIVO_D1CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
				if (tmp != crtc_status[i])
					crtc_hung &= ~(1 << i);
			}
		}
		if (crtc_hung == 0)
			return false;
		udelay(100);
	}

	return true;
}

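/*
 * Build a mask of blocks that need resetting by inspecting the status
 * registers: GRBM_STATUS covers the gfx pipeline and CP, DMA_STATUS_REG
 * the DMA engine, and SRBM_STATUS the RLC, IH, semaphore, GRBM and VMC
 * blocks.  A busy MC is deliberately not turned into a reset request,
 * as it is usually just busy rather than hung.
 */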
u32 r600_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(R_008010_GRBM_STATUS);
	if (rdev->family >= CHIP_RV770) {
		if (G_008010_PA_BUSY(tmp) | G_008010_SC_BUSY(tmp) |
		    G_008010_SH_BUSY(tmp) | G_008010_SX_BUSY(tmp) |
		    G_008010_TA_BUSY(tmp) | G_008010_VGT_BUSY(tmp) |
		    G_008010_DB03_BUSY(tmp) | G_008010_CB03_BUSY(tmp) |
		    G_008010_SPI03_BUSY(tmp) | G_008010_VGT_BUSY_NO_DMA(tmp))
			reset_mask |= RADEON_RESET_GFX;
	} else {
		if (G_008010_PA_BUSY(tmp) | G_008010_SC_BUSY(tmp) |
		    G_008010_SH_BUSY(tmp) | G_008010_SX_BUSY(tmp) |
		    G_008010_TA03_BUSY(tmp) | G_008010_VGT_BUSY(tmp) |
		    G_008010_DB03_BUSY(tmp) | G_008010_CB03_BUSY(tmp) |
		    G_008010_SPI03_BUSY(tmp) | G_008010_VGT_BUSY_NO_DMA(tmp))
			reset_mask |= RADEON_RESET_GFX;
	}

	if (G_008010_CF_RQ_PENDING(tmp) | G_008010_PF_RQ_PENDING(tmp) |
	    G_008010_CP_BUSY(tmp) | G_008010_CP_COHERENCY_BUSY(tmp))
		reset_mask |= RADEON_RESET_CP;

	if (G_008010_GRBM_EE_BUSY(tmp))
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(R_000E50_SRBM_STATUS);
	if (G_000E50_RLC_RQ_PENDING(tmp) | G_000E50_RLC_BUSY(tmp))
		reset_mask |= RADEON_RESET_RLC;

	if (G_000E50_IH_BUSY(tmp))
		reset_mask |= RADEON_RESET_IH;

	if (G_000E50_SEM_BUSY(tmp))
		reset_mask |= RADEON_RESET_SEM;

	if (G_000E50_GRBM_RQ_PENDING(tmp))
		reset_mask |= RADEON_RESET_GRBM;

	if (G_000E50_VMC_BUSY(tmp))
		reset_mask |= RADEON_RESET_VMC;

	if (G_000E50_MCB_BUSY(tmp) | G_000E50_MCDZ_BUSY(tmp) |
	    G_000E50_MCDY_BUSY(tmp) | G_000E50_MCDX_BUSY(tmp) |
	    G_000E50_MCDW_BUSY(tmp))
		reset_mask |= RADEON_RESET_MC;

	if (r600_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* Skip MC reset as it's most likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}

1685static void r600_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
1686{
1687 struct rv515_mc_save save;
1688 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
1689 u32 tmp;
1690
1691 if (reset_mask == 0)
1692 return;
1693
1694 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
1695
1696 r600_print_gpu_status_regs(rdev);
1697
1698
1699 if (rdev->family >= CHIP_RV770)
1700 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1) | S_0086D8_CP_PFP_HALT(1));
1701 else
1702 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
1703
1704
1705 WREG32(RLC_CNTL, 0);
1706
1707 if (reset_mask & RADEON_RESET_DMA) {
1708
1709 tmp = RREG32(DMA_RB_CNTL);
1710 tmp &= ~DMA_RB_ENABLE;
1711 WREG32(DMA_RB_CNTL, tmp);
1712 }
1713
1714 mdelay(50);
1715
1716 rv515_mc_stop(rdev, &save);
1717 if (r600_mc_wait_for_idle(rdev)) {
1718 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
1719 }
1720
1721 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
1722 if (rdev->family >= CHIP_RV770)
1723 grbm_soft_reset |= S_008020_SOFT_RESET_DB(1) |
1724 S_008020_SOFT_RESET_CB(1) |
1725 S_008020_SOFT_RESET_PA(1) |
1726 S_008020_SOFT_RESET_SC(1) |
1727 S_008020_SOFT_RESET_SPI(1) |
1728 S_008020_SOFT_RESET_SX(1) |
1729 S_008020_SOFT_RESET_SH(1) |
1730 S_008020_SOFT_RESET_TC(1) |
1731 S_008020_SOFT_RESET_TA(1) |
1732 S_008020_SOFT_RESET_VC(1) |
1733 S_008020_SOFT_RESET_VGT(1);
1734 else
1735 grbm_soft_reset |= S_008020_SOFT_RESET_CR(1) |
1736 S_008020_SOFT_RESET_DB(1) |
1737 S_008020_SOFT_RESET_CB(1) |
1738 S_008020_SOFT_RESET_PA(1) |
1739 S_008020_SOFT_RESET_SC(1) |
1740 S_008020_SOFT_RESET_SMX(1) |
1741 S_008020_SOFT_RESET_SPI(1) |
1742 S_008020_SOFT_RESET_SX(1) |
1743 S_008020_SOFT_RESET_SH(1) |
1744 S_008020_SOFT_RESET_TC(1) |
1745 S_008020_SOFT_RESET_TA(1) |
1746 S_008020_SOFT_RESET_VC(1) |
1747 S_008020_SOFT_RESET_VGT(1);
1748 }
1749
1750 if (reset_mask & RADEON_RESET_CP) {
1751 grbm_soft_reset |= S_008020_SOFT_RESET_CP(1) |
1752 S_008020_SOFT_RESET_VGT(1);
1753
1754 srbm_soft_reset |= S_000E60_SOFT_RESET_GRBM(1);
1755 }
1756
1757 if (reset_mask & RADEON_RESET_DMA) {
1758 if (rdev->family >= CHIP_RV770)
1759 srbm_soft_reset |= RV770_SOFT_RESET_DMA;
1760 else
1761 srbm_soft_reset |= SOFT_RESET_DMA;
1762 }
1763
1764 if (reset_mask & RADEON_RESET_RLC)
1765 srbm_soft_reset |= S_000E60_SOFT_RESET_RLC(1);
1766
1767 if (reset_mask & RADEON_RESET_SEM)
1768 srbm_soft_reset |= S_000E60_SOFT_RESET_SEM(1);
1769
1770 if (reset_mask & RADEON_RESET_IH)
1771 srbm_soft_reset |= S_000E60_SOFT_RESET_IH(1);
1772
1773 if (reset_mask & RADEON_RESET_GRBM)
1774 srbm_soft_reset |= S_000E60_SOFT_RESET_GRBM(1);
1775
1776 if (!(rdev->flags & RADEON_IS_IGP)) {
1777 if (reset_mask & RADEON_RESET_MC)
1778 srbm_soft_reset |= S_000E60_SOFT_RESET_MC(1);
1779 }
1780
1781 if (reset_mask & RADEON_RESET_VMC)
1782 srbm_soft_reset |= S_000E60_SOFT_RESET_VMC(1);
1783
1784 if (grbm_soft_reset) {
1785 tmp = RREG32(R_008020_GRBM_SOFT_RESET);
1786 tmp |= grbm_soft_reset;
1787 dev_info(rdev->dev, "R_008020_GRBM_SOFT_RESET=0x%08X\n", tmp);
1788 WREG32(R_008020_GRBM_SOFT_RESET, tmp);
1789 tmp = RREG32(R_008020_GRBM_SOFT_RESET);
1790
1791 udelay(50);
1792
1793 tmp &= ~grbm_soft_reset;
1794 WREG32(R_008020_GRBM_SOFT_RESET, tmp);
1795 tmp = RREG32(R_008020_GRBM_SOFT_RESET);
1796 }
1797
1798 if (srbm_soft_reset) {
1799 tmp = RREG32(SRBM_SOFT_RESET);
1800 tmp |= srbm_soft_reset;
1801 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
1802 WREG32(SRBM_SOFT_RESET, tmp);
1803 tmp = RREG32(SRBM_SOFT_RESET);
1804
1805 udelay(50);
1806
1807 tmp &= ~srbm_soft_reset;
1808 WREG32(SRBM_SOFT_RESET, tmp);
1809 tmp = RREG32(SRBM_SOFT_RESET);
1810 }
1811
1812
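 /* Wait a little for things to settle down */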
1813 mdelay(1);
1814
1815 rv515_mc_resume(rdev, &save);
1816 udelay(50);
1817
1818 r600_print_gpu_status_regs(rdev);
1819}
1820
1821static void r600_gpu_pci_config_reset(struct radeon_device *rdev)
1822{
1823 struct rv515_mc_save save;
1824 u32 tmp, i;
1825
1826 dev_info(rdev->dev, "GPU pci config reset\n");
1827
1828
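 /* Disable CP parsing/prefetching */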
1831 if (rdev->family >= CHIP_RV770)
1832 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1) | S_0086D8_CP_PFP_HALT(1));
1833 else
1834 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
1835
1836
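 /* disable the RLC */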
1837 WREG32(RLC_CNTL, 0);
1838
1839
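 /* Disable DMA */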
1840 tmp = RREG32(DMA_RB_CNTL);
1841 tmp &= ~DMA_RB_ENABLE;
1842 WREG32(DMA_RB_CNTL, tmp);
1843
1844 mdelay(50);
1845
1846
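 /* set mclk/sclk to bypass */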
1847 if (rdev->family >= CHIP_RV770)
1848 rv770_set_clk_bypass_mode(rdev);
1849
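 /* disable bus mastering */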
1850 pci_clear_master(rdev->pdev);
1851
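 /* disable mem access */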
1852 rv515_mc_stop(rdev, &save);
1853 if (r600_mc_wait_for_idle(rdev)) {
1854 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1855 }
1856
1857
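 /* BIF reset workaround.  Needed to get the reset to work. */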
1858 tmp = RREG32(BUS_CNTL);
1859 tmp |= VGA_COHE_SPEC_TIMER_DIS;
1860 WREG32(BUS_CNTL, tmp);
1861
1862 tmp = RREG32(BIF_SCRATCH0);
1863
1864
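 /* reset */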
1865 radeon_pci_config_reset(rdev);
1866 mdelay(1);
1867
1868
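 /* BIF reset workaround.  Needed to get the reset to work. */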
1869 tmp = SOFT_RESET_BIF;
1870 WREG32(SRBM_SOFT_RESET, tmp);
1871 mdelay(1);
1872 WREG32(SRBM_SOFT_RESET, 0);
1873
1874
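 /* wait for asic to come out of reset */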
1875 for (i = 0; i < rdev->usec_timeout; i++) {
1876 if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
1877 break;
1878 udelay(1);
1879 }
1880}
1881
1882int r600_asic_reset(struct radeon_device *rdev, bool hard)
1883{
1884 u32 reset_mask;
1885
1886 if (hard) {
1887 r600_gpu_pci_config_reset(rdev);
1888 return 0;
1889 }
1890
1891 reset_mask = r600_gpu_check_soft_reset(rdev);
1892
1893 if (reset_mask)
1894 r600_set_bios_scratch_engine_hung(rdev, true);
1895
1896
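 /* try soft reset */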
1897 r600_gpu_soft_reset(rdev, reset_mask);
1898
1899 reset_mask = r600_gpu_check_soft_reset(rdev);
1900
1901
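 /* try pci config reset */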
1902 if (reset_mask && radeon_hard_reset)
1903 r600_gpu_pci_config_reset(rdev);
1904
1905 reset_mask = r600_gpu_check_soft_reset(rdev);
1906
1907 if (!reset_mask)
1908 r600_set_bios_scratch_engine_hung(rdev, false);
1909
1910 return 0;
1911}
1912
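/**
 * r600_gfx_is_lockup - Check if the GFX engine is locked up
 *
 * @rdev: radeon_device pointer
 * @ring: radeon_ring structure holding ring information
 *
 * Check if the GFX engine is locked up.
 * Returns true if the engine appears to be locked up, false if not.
 */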
1922bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
1923{
1924 u32 reset_mask = r600_gpu_check_soft_reset(rdev);
1925
1926 if (!(reset_mask & (RADEON_RESET_GFX |
1927 RADEON_RESET_COMPUTE |
1928 RADEON_RESET_CP))) {
1929 radeon_ring_lockup_update(rdev, ring);
1930 return false;
1931 }
1932 return radeon_ring_test_lockup(rdev, ring);
1933}
1934
1935u32 r6xx_remap_render_backend(struct radeon_device *rdev,
1936 u32 tiling_pipe_num,
1937 u32 max_rb_num,
1938 u32 total_max_rb_num,
1939 u32 disabled_rb_mask)
1940{
1941 u32 rendering_pipe_num, rb_num_width, req_rb_num;
1942 u32 pipe_rb_ratio, pipe_rb_remain, tmp;
1943 u32 data = 0, mask = 1 << (max_rb_num - 1);
1944 unsigned i, j;
1945
1946
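 /* mask out the RBs that don't exist on that asic */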
1947 tmp = disabled_rb_mask | ((0xff << max_rb_num) & 0xff);
1948
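 /* make sure at least one RB is available */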
1949 if ((tmp & 0xff) != 0xff)
1950 disabled_rb_mask = tmp;
1951
1952 rendering_pipe_num = 1 << tiling_pipe_num;
1953 req_rb_num = total_max_rb_num - r600_count_pipe_bits(disabled_rb_mask);
1954 BUG_ON(rendering_pipe_num < req_rb_num);
1955
1956 pipe_rb_ratio = rendering_pipe_num / req_rb_num;
1957 pipe_rb_remain = rendering_pipe_num - pipe_rb_ratio * req_rb_num;
1958
1959 if (rdev->family <= CHIP_RV740) {
1960
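  /* r6xx/r7xx */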
1961 rb_num_width = 2;
1962 } else {
1963
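  /* eg+ */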
1964 rb_num_width = 4;
1965 }
1966
1967 for (i = 0; i < max_rb_num; i++) {
1968 if (!(mask & disabled_rb_mask)) {
1969 for (j = 0; j < pipe_rb_ratio; j++) {
1970 data <<= rb_num_width;
1971 data |= max_rb_num - i - 1;
1972 }
1973 if (pipe_rb_remain) {
1974 data <<= rb_num_width;
1975 data |= max_rb_num - i - 1;
1976 pipe_rb_remain--;
1977 }
1978 }
1979 mask >>= 1;
1980 }
1981
1982 return data;
1983}
1984
1985int r600_count_pipe_bits(uint32_t val)
1986{
1987 return hweight32(val);
1988}
1989
1990static void r600_gpu_init(struct radeon_device *rdev)
1991{
1992 u32 tiling_config;
1993 u32 ramcfg;
1994 u32 cc_gc_shader_pipe_config;
1995 u32 tmp;
1996 int i, j;
1997 u32 sq_config;
1998 u32 sq_gpr_resource_mgmt_1 = 0;
1999 u32 sq_gpr_resource_mgmt_2 = 0;
2000 u32 sq_thread_resource_mgmt = 0;
2001 u32 sq_stack_resource_mgmt_1 = 0;
2002 u32 sq_stack_resource_mgmt_2 = 0;
2003 u32 disabled_rb_mask;
2004
2005 rdev->config.r600.tiling_group_size = 256;
2006 switch (rdev->family) {
2007 case CHIP_R600:
2008 rdev->config.r600.max_pipes = 4;
2009 rdev->config.r600.max_tile_pipes = 8;
2010 rdev->config.r600.max_simds = 4;
2011 rdev->config.r600.max_backends = 4;
2012 rdev->config.r600.max_gprs = 256;
2013 rdev->config.r600.max_threads = 192;
2014 rdev->config.r600.max_stack_entries = 256;
2015 rdev->config.r600.max_hw_contexts = 8;
2016 rdev->config.r600.max_gs_threads = 16;
2017 rdev->config.r600.sx_max_export_size = 128;
2018 rdev->config.r600.sx_max_export_pos_size = 16;
2019 rdev->config.r600.sx_max_export_smx_size = 128;
2020 rdev->config.r600.sq_num_cf_insts = 2;
2021 break;
2022 case CHIP_RV630:
2023 case CHIP_RV635:
2024 rdev->config.r600.max_pipes = 2;
2025 rdev->config.r600.max_tile_pipes = 2;
2026 rdev->config.r600.max_simds = 3;
2027 rdev->config.r600.max_backends = 1;
2028 rdev->config.r600.max_gprs = 128;
2029 rdev->config.r600.max_threads = 192;
2030 rdev->config.r600.max_stack_entries = 128;
2031 rdev->config.r600.max_hw_contexts = 8;
2032 rdev->config.r600.max_gs_threads = 4;
2033 rdev->config.r600.sx_max_export_size = 128;
2034 rdev->config.r600.sx_max_export_pos_size = 16;
2035 rdev->config.r600.sx_max_export_smx_size = 128;
2036 rdev->config.r600.sq_num_cf_insts = 2;
2037 break;
2038 case CHIP_RV610:
2039 case CHIP_RV620:
2040 case CHIP_RS780:
2041 case CHIP_RS880:
2042 rdev->config.r600.max_pipes = 1;
2043 rdev->config.r600.max_tile_pipes = 1;
2044 rdev->config.r600.max_simds = 2;
2045 rdev->config.r600.max_backends = 1;
2046 rdev->config.r600.max_gprs = 128;
2047 rdev->config.r600.max_threads = 192;
2048 rdev->config.r600.max_stack_entries = 128;
2049 rdev->config.r600.max_hw_contexts = 4;
2050 rdev->config.r600.max_gs_threads = 4;
2051 rdev->config.r600.sx_max_export_size = 128;
2052 rdev->config.r600.sx_max_export_pos_size = 16;
2053 rdev->config.r600.sx_max_export_smx_size = 128;
2054 rdev->config.r600.sq_num_cf_insts = 1;
2055 break;
2056 case CHIP_RV670:
2057 rdev->config.r600.max_pipes = 4;
2058 rdev->config.r600.max_tile_pipes = 4;
2059 rdev->config.r600.max_simds = 4;
2060 rdev->config.r600.max_backends = 4;
2061 rdev->config.r600.max_gprs = 192;
2062 rdev->config.r600.max_threads = 192;
2063 rdev->config.r600.max_stack_entries = 256;
2064 rdev->config.r600.max_hw_contexts = 8;
2065 rdev->config.r600.max_gs_threads = 16;
2066 rdev->config.r600.sx_max_export_size = 128;
2067 rdev->config.r600.sx_max_export_pos_size = 16;
2068 rdev->config.r600.sx_max_export_smx_size = 128;
2069 rdev->config.r600.sq_num_cf_insts = 2;
2070 break;
2071 default:
2072 break;
2073 }
2074
2075
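 /* Initialize HDP */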
2076 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2077 WREG32((0x2c14 + j), 0x00000000);
2078 WREG32((0x2c18 + j), 0x00000000);
2079 WREG32((0x2c1c + j), 0x00000000);
2080 WREG32((0x2c20 + j), 0x00000000);
2081 WREG32((0x2c24 + j), 0x00000000);
2082 }
2083
2084 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
2085
2086
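 /* Setup tiling */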
2087 tiling_config = 0;
2088 ramcfg = RREG32(RAMCFG);
2089 switch (rdev->config.r600.max_tile_pipes) {
2090 case 1:
2091 tiling_config |= PIPE_TILING(0);
2092 break;
2093 case 2:
2094 tiling_config |= PIPE_TILING(1);
2095 break;
2096 case 4:
2097 tiling_config |= PIPE_TILING(2);
2098 break;
2099 case 8:
2100 tiling_config |= PIPE_TILING(3);
2101 break;
2102 default:
2103 break;
2104 }
2105 rdev->config.r600.tiling_npipes = rdev->config.r600.max_tile_pipes;
2106 rdev->config.r600.tiling_nbanks = 4 << ((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
2107 tiling_config |= BANK_TILING((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
2108 tiling_config |= GROUP_SIZE((ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
2109
2110 tmp = (ramcfg & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
2111 if (tmp > 3) {
2112 tiling_config |= ROW_TILING(3);
2113 tiling_config |= SAMPLE_SPLIT(3);
2114 } else {
2115 tiling_config |= ROW_TILING(tmp);
2116 tiling_config |= SAMPLE_SPLIT(tmp);
2117 }
2118 tiling_config |= BANK_SWAPS(1);
2119
2120 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0x00ffff00;
2121 tmp = rdev->config.r600.max_simds -
2122 r600_count_pipe_bits((cc_gc_shader_pipe_config >> 16) & R6XX_MAX_SIMDS_MASK);
2123 rdev->config.r600.active_simds = tmp;
2124
2125 disabled_rb_mask = (RREG32(CC_RB_BACKEND_DISABLE) >> 16) & R6XX_MAX_BACKENDS_MASK;
2126 tmp = 0;
2127 for (i = 0; i < rdev->config.r600.max_backends; i++)
2128 tmp |= (1 << i);
2129
2130 if ((disabled_rb_mask & tmp) == tmp) {
2131 for (i = 0; i < rdev->config.r600.max_backends; i++)
2132 disabled_rb_mask &= ~(1 << i);
2133 }
2134 tmp = (tiling_config & PIPE_TILING__MASK) >> PIPE_TILING__SHIFT;
2135 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.r600.max_backends,
2136 R6XX_MAX_BACKENDS, disabled_rb_mask);
2137 tiling_config |= tmp << 16;
2138 rdev->config.r600.backend_map = tmp;
2139
2140 rdev->config.r600.tile_config = tiling_config;
2141 WREG32(GB_TILING_CONFIG, tiling_config);
2142 WREG32(DCP_TILING_CONFIG, tiling_config & 0xffff);
2143 WREG32(HDP_TILING_CONFIG, tiling_config & 0xffff);
2144 WREG32(DMA_TILING_CONFIG, tiling_config & 0xffff);
2145
2146 tmp = R6XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK) >> 8);
2147 WREG32(VGT_OUT_DEALLOC_CNTL, (tmp * 4) & DEALLOC_DIST_MASK);
2148 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((tmp * 4) - 2) & VTX_REUSE_DEPTH_MASK);
2149
2150
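 /* Setup some CP states */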
2151 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) | ROQ_IB2_START(0x2b)));
2152 WREG32(CP_MEQ_THRESHOLDS, (MEQ_END(0x40) | ROQ_END(0x40)));
2153
2154 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO | SYNC_GRADIENT |
2155 SYNC_WALKER | SYNC_ALIGNER));
2156
2157 if (rdev->family == CHIP_RV670)
2158 WREG32(ARB_GDEC_RD_CNTL, 0x00000021);
2159
2160 tmp = RREG32(SX_DEBUG_1);
2161 tmp |= SMX_EVENT_RELEASE;
2162 if (rdev->family > CHIP_R600)
2163 tmp |= ENABLE_NEW_SMX_ADDRESS;
2164 WREG32(SX_DEBUG_1, tmp);
2165
2166 if (((rdev->family) == CHIP_R600) ||
2167 ((rdev->family) == CHIP_RV630) ||
2168 ((rdev->family) == CHIP_RV610) ||
2169 ((rdev->family) == CHIP_RV620) ||
2170 ((rdev->family) == CHIP_RS780) ||
2171 ((rdev->family) == CHIP_RS880)) {
2172 WREG32(DB_DEBUG, PREZ_MUST_WAIT_FOR_POSTZ_DONE);
2173 } else {
2174 WREG32(DB_DEBUG, 0);
2175 }
2176 WREG32(DB_WATERMARKS, (DEPTH_FREE(4) | DEPTH_CACHELINE_FREE(16) |
2177 DEPTH_FLUSH(16) | DEPTH_PENDING_FREE(4)));
2178
2179 WREG32(PA_SC_MULTI_CHIP_CNTL, 0);
2180 WREG32(VGT_NUM_INSTANCES, 0);
2181
2182 WREG32(SPI_CONFIG_CNTL, GPR_WRITE_PRIORITY(0));
2183 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(0));
2184
2185 tmp = RREG32(SQ_MS_FIFO_SIZES);
2186 if (((rdev->family) == CHIP_RV610) ||
2187 ((rdev->family) == CHIP_RV620) ||
2188 ((rdev->family) == CHIP_RS780) ||
2189 ((rdev->family) == CHIP_RS880)) {
2190 tmp = (CACHE_FIFO_SIZE(0xa) |
2191 FETCH_FIFO_HIWATER(0xa) |
2192 DONE_FIFO_HIWATER(0xe0) |
2193 ALU_UPDATE_FIFO_HIWATER(0x8));
2194 } else if (((rdev->family) == CHIP_R600) ||
2195 ((rdev->family) == CHIP_RV630)) {
2196 tmp &= ~DONE_FIFO_HIWATER(0xff);
2197 tmp |= DONE_FIFO_HIWATER(0x4);
2198 }
2199 WREG32(SQ_MS_FIFO_SIZES, tmp);
2200
2201
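 /* SQ_CONFIG, SQ_GPR_RESOURCE_MGMT, SQ_THREAD_RESOURCE_MGMT and
  * SQ_STACK_RESOURCE_MGMT below just set default values; the 2D/3D
  * drivers adjust them as needed.
  */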
2204 sq_config = RREG32(SQ_CONFIG);
2205 sq_config &= ~(PS_PRIO(3) |
2206 VS_PRIO(3) |
2207 GS_PRIO(3) |
2208 ES_PRIO(3));
2209 sq_config |= (DX9_CONSTS |
2210 VC_ENABLE |
2211 PS_PRIO(0) |
2212 VS_PRIO(1) |
2213 GS_PRIO(2) |
2214 ES_PRIO(3));
2215
2216 if ((rdev->family) == CHIP_R600) {
2217 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(124) |
2218 NUM_VS_GPRS(124) |
2219 NUM_CLAUSE_TEMP_GPRS(4));
2220 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(0) |
2221 NUM_ES_GPRS(0));
2222 sq_thread_resource_mgmt = (NUM_PS_THREADS(136) |
2223 NUM_VS_THREADS(48) |
2224 NUM_GS_THREADS(4) |
2225 NUM_ES_THREADS(4));
2226 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(128) |
2227 NUM_VS_STACK_ENTRIES(128));
2228 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(0) |
2229 NUM_ES_STACK_ENTRIES(0));
2230 } else if (((rdev->family) == CHIP_RV610) ||
2231 ((rdev->family) == CHIP_RV620) ||
2232 ((rdev->family) == CHIP_RS780) ||
2233 ((rdev->family) == CHIP_RS880)) {
2234
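  /* no vertex cache */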
2235 sq_config &= ~VC_ENABLE;
2236
2237 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
2238 NUM_VS_GPRS(44) |
2239 NUM_CLAUSE_TEMP_GPRS(2));
2240 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(17) |
2241 NUM_ES_GPRS(17));
2242 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
2243 NUM_VS_THREADS(78) |
2244 NUM_GS_THREADS(4) |
2245 NUM_ES_THREADS(31));
2246 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(40) |
2247 NUM_VS_STACK_ENTRIES(40));
2248 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(32) |
2249 NUM_ES_STACK_ENTRIES(16));
2250 } else if (((rdev->family) == CHIP_RV630) ||
2251 ((rdev->family) == CHIP_RV635)) {
2252 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
2253 NUM_VS_GPRS(44) |
2254 NUM_CLAUSE_TEMP_GPRS(2));
2255 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(18) |
2256 NUM_ES_GPRS(18));
2257 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
2258 NUM_VS_THREADS(78) |
2259 NUM_GS_THREADS(4) |
2260 NUM_ES_THREADS(31));
2261 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(40) |
2262 NUM_VS_STACK_ENTRIES(40));
2263 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(32) |
2264 NUM_ES_STACK_ENTRIES(16));
2265 } else if ((rdev->family) == CHIP_RV670) {
2266 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
2267 NUM_VS_GPRS(44) |
2268 NUM_CLAUSE_TEMP_GPRS(2));
2269 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(17) |
2270 NUM_ES_GPRS(17));
2271 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
2272 NUM_VS_THREADS(78) |
2273 NUM_GS_THREADS(4) |
2274 NUM_ES_THREADS(31));
2275 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(64) |
2276 NUM_VS_STACK_ENTRIES(64));
2277 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(64) |
2278 NUM_ES_STACK_ENTRIES(64));
2279 }
2280
2281 WREG32(SQ_CONFIG, sq_config);
2282 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
2283 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
2284 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
2285 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
2286 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
2287
2288 if (((rdev->family) == CHIP_RV610) ||
2289 ((rdev->family) == CHIP_RV620) ||
2290 ((rdev->family) == CHIP_RS780) ||
2291 ((rdev->family) == CHIP_RS880)) {
2292 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(TC_ONLY));
2293 } else {
2294 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(VC_AND_TC));
2295 }
2296
2297
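 /* default multisample AA sample locations */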
2298 WREG32(PA_SC_AA_SAMPLE_LOCS_2S, (S0_X(0xc) | S0_Y(0x4) |
2299 S1_X(0x4) | S1_Y(0xc)));
2300 WREG32(PA_SC_AA_SAMPLE_LOCS_4S, (S0_X(0xe) | S0_Y(0xe) |
2301 S1_X(0x2) | S1_Y(0x2) |
2302 S2_X(0xa) | S2_Y(0x6) |
2303 S3_X(0x6) | S3_Y(0xa)));
2304 WREG32(PA_SC_AA_SAMPLE_LOCS_8S_WD0, (S0_X(0xe) | S0_Y(0xb) |
2305 S1_X(0x4) | S1_Y(0xc) |
2306 S2_X(0x1) | S2_Y(0x6) |
2307 S3_X(0xa) | S3_Y(0xe)));
2308 WREG32(PA_SC_AA_SAMPLE_LOCS_8S_WD1, (S4_X(0x6) | S4_Y(0x1) |
2309 S5_X(0x0) | S5_Y(0x0) |
2310 S6_X(0xb) | S6_Y(0x4) |
2311 S7_X(0x7) | S7_Y(0x8)));
2312
2313 WREG32(VGT_STRMOUT_EN, 0);
2314 tmp = rdev->config.r600.max_pipes * 16;
2315 switch (rdev->family) {
2316 case CHIP_RV610:
2317 case CHIP_RV620:
2318 case CHIP_RS780:
2319 case CHIP_RS880:
2320 tmp += 32;
2321 break;
2322 case CHIP_RV670:
2323 tmp += 128;
2324 break;
2325 default:
2326 break;
2327 }
2328 if (tmp > 256) {
2329 tmp = 256;
2330 }
2331 WREG32(VGT_ES_PER_GS, 128);
2332 WREG32(VGT_GS_PER_ES, tmp);
2333 WREG32(VGT_GS_PER_VS, 2);
2334 WREG32(VGT_GS_VERTEX_REUSE, 16);
2335
2336
2337 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
2338 WREG32(VGT_STRMOUT_EN, 0);
2339 WREG32(SX_MISC, 0);
2340 WREG32(PA_SC_MODE_CNTL, 0);
2341 WREG32(PA_SC_AA_CONFIG, 0);
2342 WREG32(PA_SC_LINE_STIPPLE, 0);
2343 WREG32(SPI_INPUT_Z, 0);
2344 WREG32(SPI_PS_IN_CONTROL_0, NUM_INTERP(2));
2345 WREG32(CB_COLOR7_FRAG, 0);
2346
2347
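 /* clear render buffer base addresses */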
2348 WREG32(CB_COLOR0_BASE, 0);
2349 WREG32(CB_COLOR1_BASE, 0);
2350 WREG32(CB_COLOR2_BASE, 0);
2351 WREG32(CB_COLOR3_BASE, 0);
2352 WREG32(CB_COLOR4_BASE, 0);
2353 WREG32(CB_COLOR5_BASE, 0);
2354 WREG32(CB_COLOR6_BASE, 0);
2355 WREG32(CB_COLOR7_BASE, 0);
2356 WREG32(CB_COLOR7_FRAG, 0);
2357
2358 switch (rdev->family) {
2359 case CHIP_RV610:
2360 case CHIP_RV620:
2361 case CHIP_RS780:
2362 case CHIP_RS880:
2363 tmp = TC_L2_SIZE(8);
2364 break;
2365 case CHIP_RV630:
2366 case CHIP_RV635:
2367 tmp = TC_L2_SIZE(4);
2368 break;
2369 case CHIP_R600:
2370 tmp = TC_L2_SIZE(0) | L2_DISABLE_LATE_HIT;
2371 break;
2372 default:
2373 tmp = TC_L2_SIZE(0);
2374 break;
2375 }
2376 WREG32(TC_CNTL, tmp);
2377
2378 tmp = RREG32(HDP_HOST_PATH_CNTL);
2379 WREG32(HDP_HOST_PATH_CNTL, tmp);
2380
2381 tmp = RREG32(ARB_POP);
2382 tmp |= ENABLE_TC128;
2383 WREG32(ARB_POP, tmp);
2384
2385 WREG32(PA_SC_MULTI_CHIP_CNTL, 0);
2386 WREG32(PA_CL_ENHANCE, (CLIP_VTX_REORDER_ENA |
2387 NUM_CLIP_SEQ(3)));
2388 WREG32(PA_SC_ENHANCE, FORCE_EOV_MAX_CLK_CNT(4095));
2389 WREG32(VC_ENHANCE, 0);
2390}
2391
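/*
 * Indirect registers accessor
 */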
2396u32 r600_pciep_rreg(struct radeon_device *rdev, u32 reg)
2397{
2398 unsigned long flags;
2399 u32 r;
2400
2401 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
2402 WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
2403 (void)RREG32(PCIE_PORT_INDEX);
2404 r = RREG32(PCIE_PORT_DATA);
2405 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
2406 return r;
2407}
2408
2409void r600_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
2410{
2411 unsigned long flags;
2412
2413 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
2414 WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
2415 (void)RREG32(PCIE_PORT_INDEX);
2416 WREG32(PCIE_PORT_DATA, (v));
2417 (void)RREG32(PCIE_PORT_DATA);
2418 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
2419}
2420
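/*
 * CP & Ring
 */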
2424void r600_cp_stop(struct radeon_device *rdev)
2425{
2426 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
2427 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
2428 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
2429 WREG32(SCRATCH_UMSK, 0);
2430 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
2431}
2432
2433int r600_init_microcode(struct radeon_device *rdev)
2434{
2435 const char *chip_name;
2436 const char *rlc_chip_name;
2437 const char *smc_chip_name = "RV770";
2438 size_t pfp_req_size, me_req_size, rlc_req_size, smc_req_size = 0;
2439 char fw_name[30];
2440 int err;
2441
2442 DRM_DEBUG("\n");
2443
2444 switch (rdev->family) {
2445 case CHIP_R600:
2446 chip_name = "R600";
2447 rlc_chip_name = "R600";
2448 break;
2449 case CHIP_RV610:
2450 chip_name = "RV610";
2451 rlc_chip_name = "R600";
2452 break;
2453 case CHIP_RV630:
2454 chip_name = "RV630";
2455 rlc_chip_name = "R600";
2456 break;
2457 case CHIP_RV620:
2458 chip_name = "RV620";
2459 rlc_chip_name = "R600";
2460 break;
2461 case CHIP_RV635:
2462 chip_name = "RV635";
2463 rlc_chip_name = "R600";
2464 break;
2465 case CHIP_RV670:
2466 chip_name = "RV670";
2467 rlc_chip_name = "R600";
2468 break;
2469 case CHIP_RS780:
2470 case CHIP_RS880:
2471 chip_name = "RS780";
2472 rlc_chip_name = "R600";
2473 break;
2474 case CHIP_RV770:
2475 chip_name = "RV770";
2476 rlc_chip_name = "R700";
2477 smc_chip_name = "RV770";
2478 smc_req_size = ALIGN(RV770_SMC_UCODE_SIZE, 4);
2479 break;
2480 case CHIP_RV730:
2481 chip_name = "RV730";
2482 rlc_chip_name = "R700";
2483 smc_chip_name = "RV730";
2484 smc_req_size = ALIGN(RV730_SMC_UCODE_SIZE, 4);
2485 break;
2486 case CHIP_RV710:
2487 chip_name = "RV710";
2488 rlc_chip_name = "R700";
2489 smc_chip_name = "RV710";
2490 smc_req_size = ALIGN(RV710_SMC_UCODE_SIZE, 4);
2491 break;
2492 case CHIP_RV740:
2493 chip_name = "RV730";
2494 rlc_chip_name = "R700";
2495 smc_chip_name = "RV740";
2496 smc_req_size = ALIGN(RV740_SMC_UCODE_SIZE, 4);
2497 break;
2498 case CHIP_CEDAR:
2499 chip_name = "CEDAR";
2500 rlc_chip_name = "CEDAR";
2501 smc_chip_name = "CEDAR";
2502 smc_req_size = ALIGN(CEDAR_SMC_UCODE_SIZE, 4);
2503 break;
2504 case CHIP_REDWOOD:
2505 chip_name = "REDWOOD";
2506 rlc_chip_name = "REDWOOD";
2507 smc_chip_name = "REDWOOD";
2508 smc_req_size = ALIGN(REDWOOD_SMC_UCODE_SIZE, 4);
2509 break;
2510 case CHIP_JUNIPER:
2511 chip_name = "JUNIPER";
2512 rlc_chip_name = "JUNIPER";
2513 smc_chip_name = "JUNIPER";
2514 smc_req_size = ALIGN(JUNIPER_SMC_UCODE_SIZE, 4);
2515 break;
2516 case CHIP_CYPRESS:
2517 case CHIP_HEMLOCK:
2518 chip_name = "CYPRESS";
2519 rlc_chip_name = "CYPRESS";
2520 smc_chip_name = "CYPRESS";
2521 smc_req_size = ALIGN(CYPRESS_SMC_UCODE_SIZE, 4);
2522 break;
2523 case CHIP_PALM:
2524 chip_name = "PALM";
2525 rlc_chip_name = "SUMO";
2526 break;
2527 case CHIP_SUMO:
2528 chip_name = "SUMO";
2529 rlc_chip_name = "SUMO";
2530 break;
2531 case CHIP_SUMO2:
2532 chip_name = "SUMO2";
2533 rlc_chip_name = "SUMO";
2534 break;
2535 default: BUG();
2536 }
2537
2538 if (rdev->family >= CHIP_CEDAR) {
2539 pfp_req_size = EVERGREEN_PFP_UCODE_SIZE * 4;
2540 me_req_size = EVERGREEN_PM4_UCODE_SIZE * 4;
2541 rlc_req_size = EVERGREEN_RLC_UCODE_SIZE * 4;
2542 } else if (rdev->family >= CHIP_RV770) {
2543 pfp_req_size = R700_PFP_UCODE_SIZE * 4;
2544 me_req_size = R700_PM4_UCODE_SIZE * 4;
2545 rlc_req_size = R700_RLC_UCODE_SIZE * 4;
2546 } else {
2547 pfp_req_size = R600_PFP_UCODE_SIZE * 4;
2548 me_req_size = R600_PM4_UCODE_SIZE * 12;
2549 rlc_req_size = R600_RLC_UCODE_SIZE * 4;
2550 }
2551
2552 DRM_INFO("Loading %s Microcode\n", chip_name);
2553
2554 snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
2555 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
2556 if (err)
2557 goto out;
2558 if (rdev->pfp_fw->size != pfp_req_size) {
2559 pr_err("r600_cp: Bogus length %zu in firmware \"%s\"\n",
2560 rdev->pfp_fw->size, fw_name);
2561 err = -EINVAL;
2562 goto out;
2563 }
2564
2565 snprintf(fw_name, sizeof(fw_name), "radeon/%s_me.bin", chip_name);
2566 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
2567 if (err)
2568 goto out;
2569 if (rdev->me_fw->size != me_req_size) {
2570 pr_err("r600_cp: Bogus length %zu in firmware \"%s\"\n",
2571 rdev->me_fw->size, fw_name);
2572 err = -EINVAL;
2573 }
2574
2575 snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
2576 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
2577 if (err)
2578 goto out;
2579 if (rdev->rlc_fw->size != rlc_req_size) {
2580 pr_err("r600_rlc: Bogus length %zu in firmware \"%s\"\n",
2581 rdev->rlc_fw->size, fw_name);
2582 err = -EINVAL;
2583 }
2584
2585 if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_HEMLOCK)) {
2586 snprintf(fw_name, sizeof(fw_name), "radeon/%s_smc.bin", smc_chip_name);
2587 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
2588 if (err) {
2589 pr_err("smc: error loading firmware \"%s\"\n", fw_name);
2590 release_firmware(rdev->smc_fw);
2591 rdev->smc_fw = NULL;
2592 err = 0;
2593 } else if (rdev->smc_fw->size != smc_req_size) {
2594 pr_err("smc: Bogus length %zu in firmware \"%s\"\n",
2595 rdev->smc_fw->size, fw_name);
2596 err = -EINVAL;
2597 }
2598 }
2599
2600out:
2601 if (err) {
2602 if (err != -EINVAL)
2603 pr_err("r600_cp: Failed to load firmware \"%s\"\n",
2604 fw_name);
2605 release_firmware(rdev->pfp_fw);
2606 rdev->pfp_fw = NULL;
2607 release_firmware(rdev->me_fw);
2608 rdev->me_fw = NULL;
2609 release_firmware(rdev->rlc_fw);
2610 rdev->rlc_fw = NULL;
2611 release_firmware(rdev->smc_fw);
2612 rdev->smc_fw = NULL;
2613 }
2614 return err;
2615}
2616
2617u32 r600_gfx_get_rptr(struct radeon_device *rdev,
2618 struct radeon_ring *ring)
2619{
2620 u32 rptr;
2621
2622 if (rdev->wb.enabled)
2623 rptr = rdev->wb.wb[ring->rptr_offs/4];
2624 else
2625 rptr = RREG32(R600_CP_RB_RPTR);
2626
2627 return rptr;
2628}
2629
2630u32 r600_gfx_get_wptr(struct radeon_device *rdev,
2631 struct radeon_ring *ring)
2632{
2633 return RREG32(R600_CP_RB_WPTR);
2634}
2635
2636void r600_gfx_set_wptr(struct radeon_device *rdev,
2637 struct radeon_ring *ring)
2638{
2639 WREG32(R600_CP_RB_WPTR, ring->wptr);
2640 (void)RREG32(R600_CP_RB_WPTR);
2641}
2642
2643static int r600_cp_load_microcode(struct radeon_device *rdev)
2644{
2645 const __be32 *fw_data;
2646 int i;
2647
2648 if (!rdev->me_fw || !rdev->pfp_fw)
2649 return -EINVAL;
2650
2651 r600_cp_stop(rdev);
2652
2653 WREG32(CP_RB_CNTL,
2654#ifdef __BIG_ENDIAN
2655 BUF_SWAP_32BIT |
2656#endif
2657 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2658
2659
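 /* Reset cp */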
2660 WREG32(GRBM_SOFT_RESET, SOFT_RESET_CP);
2661 RREG32(GRBM_SOFT_RESET);
2662 mdelay(15);
2663 WREG32(GRBM_SOFT_RESET, 0);
2664
2665 WREG32(CP_ME_RAM_WADDR, 0);
2666
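 /* load the ME microcode */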
2667 fw_data = (const __be32 *)rdev->me_fw->data;
2668 WREG32(CP_ME_RAM_WADDR, 0);
2669 for (i = 0; i < R600_PM4_UCODE_SIZE * 3; i++)
2670 WREG32(CP_ME_RAM_DATA,
2671 be32_to_cpup(fw_data++));
2672
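 /* load the PFP microcode */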
2673 fw_data = (const __be32 *)rdev->pfp_fw->data;
2674 WREG32(CP_PFP_UCODE_ADDR, 0);
2675 for (i = 0; i < R600_PFP_UCODE_SIZE; i++)
2676 WREG32(CP_PFP_UCODE_DATA,
2677 be32_to_cpup(fw_data++));
2678
2679 WREG32(CP_PFP_UCODE_ADDR, 0);
2680 WREG32(CP_ME_RAM_WADDR, 0);
2681 WREG32(CP_ME_RAM_RADDR, 0);
2682 return 0;
2683}
2684
2685int r600_cp_start(struct radeon_device *rdev)
2686{
2687 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2688 int r;
2689 uint32_t cp_me;
2690
2691 r = radeon_ring_lock(rdev, ring, 7);
2692 if (r) {
2693 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2694 return r;
2695 }
2696 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2697 radeon_ring_write(ring, 0x1);
2698 if (rdev->family >= CHIP_RV770) {
2699 radeon_ring_write(ring, 0x0);
2700 radeon_ring_write(ring, rdev->config.rv770.max_hw_contexts - 1);
2701 } else {
2702 radeon_ring_write(ring, 0x3);
2703 radeon_ring_write(ring, rdev->config.r600.max_hw_contexts - 1);
2704 }
2705 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2706 radeon_ring_write(ring, 0);
2707 radeon_ring_write(ring, 0);
2708 radeon_ring_unlock_commit(rdev, ring, false);
2709
2710 cp_me = 0xff;
2711 WREG32(R_0086D8_CP_ME_CNTL, cp_me);
2712 return 0;
2713}
2714
2715int r600_cp_resume(struct radeon_device *rdev)
2716{
2717 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2718 u32 tmp;
2719 u32 rb_bufsz;
2720 int r;
2721
2722
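 /* Reset cp */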
2723 WREG32(GRBM_SOFT_RESET, SOFT_RESET_CP);
2724 RREG32(GRBM_SOFT_RESET);
2725 mdelay(15);
2726 WREG32(GRBM_SOFT_RESET, 0);
2727
2728
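 /* Set ring buffer size */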
2729 rb_bufsz = order_base_2(ring->ring_size / 8);
2730 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2731#ifdef __BIG_ENDIAN
2732 tmp |= BUF_SWAP_32BIT;
2733#endif
2734 WREG32(CP_RB_CNTL, tmp);
2735 WREG32(CP_SEM_WAIT_TIMER, 0x0);
2736
2737
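 /* Set the write pointer delay */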
2738 WREG32(CP_RB_WPTR_DELAY, 0);
2739
2740
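 /* Initialize the ring buffer's read and write pointers */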
2741 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2742 WREG32(CP_RB_RPTR_WR, 0);
2743 ring->wptr = 0;
2744 WREG32(CP_RB_WPTR, ring->wptr);
2745
2746
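 /* set the wb address whether it's enabled or not */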
2747 WREG32(CP_RB_RPTR_ADDR,
2748 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2749 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2750 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2751
2752 if (rdev->wb.enabled)
2753 WREG32(SCRATCH_UMSK, 0xff);
2754 else {
2755 tmp |= RB_NO_UPDATE;
2756 WREG32(SCRATCH_UMSK, 0);
2757 }
2758
2759 mdelay(1);
2760 WREG32(CP_RB_CNTL, tmp);
2761
2762 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2763 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2764
2765 r600_cp_start(rdev);
2766 ring->ready = true;
2767 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2768 if (r) {
2769 ring->ready = false;
2770 return r;
2771 }
2772
2773 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
2774 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
2775
2776 return 0;
2777}
2778
2779void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size)
2780{
2781 u32 rb_bufsz;
2782 int r;
2783
2784
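 /* Align ring size */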
2785 rb_bufsz = order_base_2(ring_size / 8);
2786 ring_size = (1 << (rb_bufsz + 1)) * 4;
2787 ring->ring_size = ring_size;
2788 ring->align_mask = 16 - 1;
2789
2790 if (radeon_ring_supports_scratch_reg(rdev, ring)) {
2791 r = radeon_scratch_get(rdev, &ring->rptr_save_reg);
2792 if (r) {
2793 DRM_ERROR("failed to get scratch reg for rptr save (%d).\n", r);
2794 ring->rptr_save_reg = 0;
2795 }
2796 }
2797}
2798
2799void r600_cp_fini(struct radeon_device *rdev)
2800{
2801 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2802 r600_cp_stop(rdev);
2803 radeon_ring_fini(rdev, ring);
2804 radeon_scratch_free(rdev, ring->rptr_save_reg);
2805}
2806
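/*
 * GPU scratch registers helpers function.
 */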
2810void r600_scratch_init(struct radeon_device *rdev)
2811{
2812 int i;
2813
2814 rdev->scratch.num_reg = 7;
2815 rdev->scratch.reg_base = SCRATCH_REG0;
2816 for (i = 0; i < rdev->scratch.num_reg; i++) {
2817 rdev->scratch.free[i] = true;
2818 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
2819 }
2820}
2821
2822int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
2823{
2824 uint32_t scratch;
2825 uint32_t tmp = 0;
2826 unsigned i;
2827 int r;
2828
2829 r = radeon_scratch_get(rdev, &scratch);
2830 if (r) {
2831 DRM_ERROR("radeon: cp failed to get scratch reg (%d).\n", r);
2832 return r;
2833 }
2834 WREG32(scratch, 0xCAFEDEAD);
2835 r = radeon_ring_lock(rdev, ring, 3);
2836 if (r) {
2837 DRM_ERROR("radeon: cp failed to lock ring %d (%d).\n", ring->idx, r);
2838 radeon_scratch_free(rdev, scratch);
2839 return r;
2840 }
2841 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2842 radeon_ring_write(ring, ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
2843 radeon_ring_write(ring, 0xDEADBEEF);
2844 radeon_ring_unlock_commit(rdev, ring, false);
2845 for (i = 0; i < rdev->usec_timeout; i++) {
2846 tmp = RREG32(scratch);
2847 if (tmp == 0xDEADBEEF)
2848 break;
2849 udelay(1);
2850 }
2851 if (i < rdev->usec_timeout) {
2852 DRM_INFO("ring test on %d succeeded in %d usecs\n", ring->idx, i);
2853 } else {
2854 DRM_ERROR("radeon: ring %d test failed (scratch(0x%04X)=0x%08X)\n",
2855 ring->idx, scratch, tmp);
2856 r = -EINVAL;
2857 }
2858 radeon_scratch_free(rdev, scratch);
2859 return r;
2860}
2861
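/*
 * CP fences/semaphores
 */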
2866void r600_fence_ring_emit(struct radeon_device *rdev,
2867 struct radeon_fence *fence)
2868{
2869 struct radeon_ring *ring = &rdev->ring[fence->ring];
2870 u32 cp_coher_cntl = PACKET3_TC_ACTION_ENA | PACKET3_VC_ACTION_ENA |
2871 PACKET3_SH_ACTION_ENA;
2872
2873 if (rdev->family >= CHIP_RV770)
2874 cp_coher_cntl |= PACKET3_FULL_CACHE_ENA;
2875
2876 if (rdev->wb.use_event) {
2877 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
2878
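  /* flush read cache over gart */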
2879 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
2880 radeon_ring_write(ring, cp_coher_cntl);
2881 radeon_ring_write(ring, 0xFFFFFFFF);
2882 radeon_ring_write(ring, 0);
2883 radeon_ring_write(ring, 10);
2884
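  /* EVENT_WRITE_EOP - flush caches, send int */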
2885 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
2886 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT_TS) | EVENT_INDEX(5));
2887 radeon_ring_write(ring, lower_32_bits(addr));
2888 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
2889 radeon_ring_write(ring, fence->seq);
2890 radeon_ring_write(ring, 0);
2891 } else {
2892
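  /* flush read cache over gart */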
2893 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
2894 radeon_ring_write(ring, cp_coher_cntl);
2895 radeon_ring_write(ring, 0xFFFFFFFF);
2896 radeon_ring_write(ring, 0);
2897 radeon_ring_write(ring, 10);
2898 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
2899 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT) | EVENT_INDEX(0));
2900
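  /* wait for 3D idle clean */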
2901 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2902 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
2903 radeon_ring_write(ring, WAIT_3D_IDLE_bit | WAIT_3D_IDLECLEAN_bit);
2904
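  /* Emit fence sequence & fire IRQ */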
2905 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2906 radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
2907 radeon_ring_write(ring, fence->seq);
2908
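  /* CP_INTERRUPT packet 3 no longer exists, use packet 0 */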
2909 radeon_ring_write(ring, PACKET0(CP_INT_STATUS, 0));
2910 radeon_ring_write(ring, RB_INT_STAT);
2911 }
2912}
2913
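/**
 * r600_semaphore_ring_emit - emit a semaphore on the CP ring
 *
 * @rdev: radeon_device pointer
 * @ring: radeon ring buffer object
 * @semaphore: radeon semaphore object
 * @emit_wait: Is this a semaphore wait?
 *
 * Emits a semaphore signal/wait packet to the CP ring and prevents the PFP
 * from running ahead of semaphore waits.
 */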
2925bool r600_semaphore_ring_emit(struct radeon_device *rdev,
2926 struct radeon_ring *ring,
2927 struct radeon_semaphore *semaphore,
2928 bool emit_wait)
2929{
2930 uint64_t addr = semaphore->gpu_addr;
2931 unsigned sel = emit_wait ? PACKET3_SEM_SEL_WAIT : PACKET3_SEM_SEL_SIGNAL;
2932
2933 if (rdev->family < CHIP_CAYMAN)
2934 sel |= PACKET3_SEM_WAIT_ON_SIGNAL;
2935
2936 radeon_ring_write(ring, PACKET3(PACKET3_MEM_SEMAPHORE, 1));
2937 radeon_ring_write(ring, lower_32_bits(addr));
2938 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | sel);
2939
2940
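 /* PFP_SYNC_ME packet only exists on 7xx+, only enable it on eg+ */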
2941 if (emit_wait && (rdev->family >= CHIP_CEDAR)) {
2942
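  /* Prevent the PFP from running ahead of the semaphore wait */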
2943 radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
2944 radeon_ring_write(ring, 0x0);
2945 }
2946
2947 return true;
2948}
2949
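/**
 * r600_copy_cpdma - copy pages using the CP DMA engine
 *
 * @rdev: radeon_device pointer
 * @src_offset: src GPU address
 * @dst_offset: dst GPU address
 * @num_gpu_pages: number of GPU pages to xfer
 * @resv: DMA reservation object to manage fences
 *
 * Copy GPU paging using the CP DMA engine (r6xx+).
 * Used by the radeon ttm implementation to move pages if
 * registered as the asic copy callback.
 */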
2963struct radeon_fence *r600_copy_cpdma(struct radeon_device *rdev,
2964 uint64_t src_offset, uint64_t dst_offset,
2965 unsigned num_gpu_pages,
2966 struct dma_resv *resv)
2967{
2968 struct radeon_fence *fence;
2969 struct radeon_sync sync;
2970 int ring_index = rdev->asic->copy.blit_ring_index;
2971 struct radeon_ring *ring = &rdev->ring[ring_index];
2972 u32 size_in_bytes, cur_size_in_bytes, tmp;
2973 int i, num_loops;
2974 int r = 0;
2975
2976 radeon_sync_create(&sync);
2977
2978 size_in_bytes = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT);
2979 num_loops = DIV_ROUND_UP(size_in_bytes, 0x1fffff);
2980 r = radeon_ring_lock(rdev, ring, num_loops * 6 + 24);
2981 if (r) {
2982 DRM_ERROR("radeon: moving bo (%d).\n", r);
2983 radeon_sync_free(rdev, &sync, NULL);
2984 return ERR_PTR(r);
2985 }
2986
2987 radeon_sync_resv(rdev, &sync, resv, false);
2988 radeon_sync_rings(rdev, &sync, ring->idx);
2989
2990 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2991 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
2992 radeon_ring_write(ring, WAIT_3D_IDLE_bit);
2993 for (i = 0; i < num_loops; i++) {
2994 cur_size_in_bytes = size_in_bytes;
2995 if (cur_size_in_bytes > 0x1fffff)
2996 cur_size_in_bytes = 0x1fffff;
2997 size_in_bytes -= cur_size_in_bytes;
2998 tmp = upper_32_bits(src_offset) & 0xff;
2999 if (size_in_bytes == 0)
3000 tmp |= PACKET3_CP_DMA_CP_SYNC;
3001 radeon_ring_write(ring, PACKET3(PACKET3_CP_DMA, 4));
3002 radeon_ring_write(ring, lower_32_bits(src_offset));
3003 radeon_ring_write(ring, tmp);
3004 radeon_ring_write(ring, lower_32_bits(dst_offset));
3005 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
3006 radeon_ring_write(ring, cur_size_in_bytes);
3007 src_offset += cur_size_in_bytes;
3008 dst_offset += cur_size_in_bytes;
3009 }
3010 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3011 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
3012 radeon_ring_write(ring, WAIT_CP_DMA_IDLE_bit);
3013
3014 r = radeon_fence_emit(rdev, &fence, ring->idx);
3015 if (r) {
3016 radeon_ring_unlock_undo(rdev, ring);
3017 radeon_sync_free(rdev, &sync, NULL);
3018 return ERR_PTR(r);
3019 }
3020
3021 radeon_ring_unlock_commit(rdev, ring, false);
3022 radeon_sync_free(rdev, &sync, fence);
3023
3024 return fence;
3025}
3026
3027int r600_set_surface_reg(struct radeon_device *rdev, int reg,
3028 uint32_t tiling_flags, uint32_t pitch,
3029 uint32_t offset, uint32_t obj_size)
3030{
3031
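 /* FIXME: implement */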
3032 return 0;
3033}
3034
3035void r600_clear_surface_reg(struct radeon_device *rdev, int reg)
3036{
3037
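 /* FIXME: implement */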
3038}
3039
3040static void r600_uvd_init(struct radeon_device *rdev)
3041{
3042 int r;
3043
3044 if (!rdev->has_uvd)
3045 return;
3046
3047 r = radeon_uvd_init(rdev);
3048 if (r) {
3049 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
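  /*
   * At this point rdev->uvd.vcpu_bo is NULL which trickles down
   * to early fails uvd_v1_0_resume() and thus nothing happens
   * there. So it is pointless to try to go through that code
   * hence why we disable uvd here.
   */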
3056 rdev->has_uvd = false;
3057 return;
3058 }
3059 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
3060 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
3061}
3062
3063static void r600_uvd_start(struct radeon_device *rdev)
3064{
3065 int r;
3066
3067 if (!rdev->has_uvd)
3068 return;
3069
3070 r = uvd_v1_0_resume(rdev);
3071 if (r) {
3072 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
3073 goto error;
3074 }
3075 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
3076 if (r) {
3077 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
3078 goto error;
3079 }
3080 return;
3081
3082error:
3083 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
3084}
3085
3086static void r600_uvd_resume(struct radeon_device *rdev)
3087{
3088 struct radeon_ring *ring;
3089 int r;
3090
3091 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
3092 return;
3093
3094 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
3095 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
3096 if (r) {
3097 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
3098 return;
3099 }
3100 r = uvd_v1_0_init(rdev);
3101 if (r) {
3102 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
3103 return;
3104 }
3105}
3106
3107static int r600_startup(struct radeon_device *rdev)
3108{
3109 struct radeon_ring *ring;
3110 int r;
3111
3112
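 /* enable pcie gen2 link */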
3113 r600_pcie_gen2_enable(rdev);
3114
3115
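 /* scratch needs to be initialized before the irq */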
3116 r = r600_vram_scratch_init(rdev);
3117 if (r)
3118 return r;
3119
3120 r600_mc_program(rdev);
3121
3122 if (rdev->flags & RADEON_IS_AGP) {
3123 r600_agp_enable(rdev);
3124 } else {
3125 r = r600_pcie_gart_enable(rdev);
3126 if (r)
3127 return r;
3128 }
3129 r600_gpu_init(rdev);
3130
3131
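 /* allocate wb buffer */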
3132 r = radeon_wb_init(rdev);
3133 if (r)
3134 return r;
3135
3136 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
3137 if (r) {
3138 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
3139 return r;
3140 }
3141
3142 r600_uvd_start(rdev);
3143
3144
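 /* Enable IRQ */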
3145 if (!rdev->irq.installed) {
3146 r = radeon_irq_kms_init(rdev);
3147 if (r)
3148 return r;
3149 }
3150
3151 r = r600_irq_init(rdev);
3152 if (r) {
3153 DRM_ERROR("radeon: IH init failed (%d).\n", r);
3154 radeon_irq_kms_fini(rdev);
3155 return r;
3156 }
3157 r600_irq_set(rdev);
3158
3159 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3160 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
3161 RADEON_CP_PACKET2);
3162 if (r)
3163 return r;
3164
3165 r = r600_cp_load_microcode(rdev);
3166 if (r)
3167 return r;
3168 r = r600_cp_resume(rdev);
3169 if (r)
3170 return r;
3171
3172 r600_uvd_resume(rdev);
3173
3174 r = radeon_ib_pool_init(rdev);
3175 if (r) {
3176 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
3177 return r;
3178 }
3179
3180 r = radeon_audio_init(rdev);
3181 if (r) {
3182 DRM_ERROR("radeon: audio init failed\n");
3183 return r;
3184 }
3185
3186 return 0;
3187}
3188
3189void r600_vga_set_state(struct radeon_device *rdev, bool state)
3190{
3191 uint32_t temp;
3192
3193 temp = RREG32(CONFIG_CNTL);
3194 if (!state) {
3195 temp &= ~(1<<0);
3196 temp |= (1<<1);
3197 } else {
3198 temp &= ~(1<<1);
3199 }
3200 WREG32(CONFIG_CNTL, temp);
3201}
3202
3203int r600_resume(struct radeon_device *rdev)
3204{
3205 int r;
3206
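 /* Do not reset GPU before posting, on r600 hw unlike on r500 hw,
  * posting will perform necessary task to bring back GPU into good
  * shape.
  */
 /* post card */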
3212 atom_asic_init(rdev->mode_info.atom_context);
3213
3214 if (rdev->pm.pm_method == PM_METHOD_DPM)
3215 radeon_pm_resume(rdev);
3216
3217 rdev->accel_working = true;
3218 r = r600_startup(rdev);
3219 if (r) {
3220 DRM_ERROR("r600 startup failed on resume\n");
3221 rdev->accel_working = false;
3222 return r;
3223 }
3224
3225 return r;
3226}
3227
3228int r600_suspend(struct radeon_device *rdev)
3229{
3230 radeon_pm_suspend(rdev);
3231 radeon_audio_fini(rdev);
3232 r600_cp_stop(rdev);
3233 if (rdev->has_uvd) {
3234 uvd_v1_0_fini(rdev);
3235 radeon_uvd_suspend(rdev);
3236 }
3237 r600_irq_suspend(rdev);
3238 radeon_wb_disable(rdev);
3239 r600_pcie_gart_disable(rdev);
3240
3241 return 0;
3242}
3243
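/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 */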
3250int r600_init(struct radeon_device *rdev)
3251{
3252 int r;
3253
3254 if (r600_debugfs_mc_info_init(rdev)) {
3255 DRM_ERROR("Failed to register debugfs file for mc !\n");
3256 }
3257
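 /* Read BIOS */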
3258 if (!radeon_get_bios(rdev)) {
3259 if (ASIC_IS_AVIVO(rdev))
3260 return -EINVAL;
3261 }
3262
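 /* Must be an ATOMBIOS */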
3263 if (!rdev->is_atom_bios) {
3264 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
3265 return -EINVAL;
3266 }
3267 r = radeon_atombios_init(rdev);
3268 if (r)
3269 return r;
3270
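 /* Post card if necessary */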
3271 if (!radeon_card_posted(rdev)) {
3272 if (!rdev->bios) {
3273 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
3274 return -EINVAL;
3275 }
3276 DRM_INFO("GPU not posted. posting now...\n");
3277 atom_asic_init(rdev->mode_info.atom_context);
3278 }
3279
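 /* initialize scratch registers */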
3280 r600_scratch_init(rdev);
3281
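 /* Initialize surface registers */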
3282 radeon_surface_init(rdev);
3283
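 /* Initialize clocks */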
3284 radeon_get_clock_info(rdev->ddev);
3285
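 /* Fence driver */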
3286 r = radeon_fence_driver_init(rdev);
3287 if (r)
3288 return r;
3289 if (rdev->flags & RADEON_IS_AGP) {
3290 r = radeon_agp_init(rdev);
3291 if (r)
3292 radeon_agp_disable(rdev);
3293 }
3294 r = r600_mc_init(rdev);
3295 if (r)
3296 return r;
3297
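 /* Memory manager */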
3298 r = radeon_bo_init(rdev);
3299 if (r)
3300 return r;
3301
3302 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
3303 r = r600_init_microcode(rdev);
3304 if (r) {
3305 DRM_ERROR("Failed to load firmware!\n");
3306 return r;
3307 }
3308 }
3309
3310
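 /* Initialize power management */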
3311 radeon_pm_init(rdev);
3312
3313 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
3314 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
3315
3316 r600_uvd_init(rdev);
3317
3318 rdev->ih.ring_obj = NULL;
3319 r600_ih_ring_init(rdev, 64 * 1024);
3320
3321 r = r600_pcie_gart_init(rdev);
3322 if (r)
3323 return r;
3324
3325 rdev->accel_working = true;
3326 r = r600_startup(rdev);
3327 if (r) {
3328 dev_err(rdev->dev, "disabling GPU acceleration\n");
3329 r600_cp_fini(rdev);
3330 r600_irq_fini(rdev);
3331 radeon_wb_fini(rdev);
3332 radeon_ib_pool_fini(rdev);
3333 radeon_irq_kms_fini(rdev);
3334 r600_pcie_gart_fini(rdev);
3335 rdev->accel_working = false;
3336 }
3337
3338 return 0;
3339}
3340
3341void r600_fini(struct radeon_device *rdev)
3342{
3343 radeon_pm_fini(rdev);
3344 radeon_audio_fini(rdev);
3345 r600_cp_fini(rdev);
3346 r600_irq_fini(rdev);
3347 if (rdev->has_uvd) {
3348 uvd_v1_0_fini(rdev);
3349 radeon_uvd_fini(rdev);
3350 }
3351 radeon_wb_fini(rdev);
3352 radeon_ib_pool_fini(rdev);
3353 radeon_irq_kms_fini(rdev);
3354 r600_pcie_gart_fini(rdev);
3355 r600_vram_scratch_fini(rdev);
3356 radeon_agp_fini(rdev);
3357 radeon_gem_fini(rdev);
3358 radeon_fence_driver_fini(rdev);
3359 radeon_bo_fini(rdev);
3360 radeon_atombios_fini(rdev);
3361 kfree(rdev->bios);
3362 rdev->bios = NULL;
3363}
3364
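/*
 * CS stuff
 */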
3369void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3370{
3371 struct radeon_ring *ring = &rdev->ring[ib->ring];
3372 u32 next_rptr;
3373
3374 if (ring->rptr_save_reg) {
3375 next_rptr = ring->wptr + 3 + 4;
3376 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3377 radeon_ring_write(ring, ((ring->rptr_save_reg -
3378 PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
3379 radeon_ring_write(ring, next_rptr);
3380 } else if (rdev->wb.enabled) {
3381 next_rptr = ring->wptr + 5 + 4;
3382 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3383 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3384 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3385 radeon_ring_write(ring, next_rptr);
3386 radeon_ring_write(ring, 0);
3387 }
3388
3389 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3390 radeon_ring_write(ring,
3391#ifdef __BIG_ENDIAN
3392 (2 << 0) |
3393#endif
3394 (ib->gpu_addr & 0xFFFFFFFC));
3395 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3396 radeon_ring_write(ring, ib->length_dw);
3397}
3398
3399int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
3400{
3401 struct radeon_ib ib;
3402 uint32_t scratch;
3403 uint32_t tmp = 0;
3404 unsigned i;
3405 int r;
3406
3407 r = radeon_scratch_get(rdev, &scratch);
3408 if (r) {
3409 DRM_ERROR("radeon: failed to get scratch reg (%d).\n", r);
3410 return r;
3411 }
3412 WREG32(scratch, 0xCAFEDEAD);
3413 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
3414 if (r) {
3415 DRM_ERROR("radeon: failed to get ib (%d).\n", r);
3416 goto free_scratch;
3417 }
3418 ib.ptr[0] = PACKET3(PACKET3_SET_CONFIG_REG, 1);
3419 ib.ptr[1] = ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
3420 ib.ptr[2] = 0xDEADBEEF;
3421 ib.length_dw = 3;
3422 r = radeon_ib_schedule(rdev, &ib, NULL, false);
3423 if (r) {
3424 DRM_ERROR("radeon: failed to schedule ib (%d).\n", r);
3425 goto free_ib;
3426 }
3427 r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(
3428 RADEON_USEC_IB_TEST_TIMEOUT));
3429 if (r < 0) {
3430 DRM_ERROR("radeon: fence wait failed (%d).\n", r);
3431 goto free_ib;
3432 } else if (r == 0) {
3433 DRM_ERROR("radeon: fence wait timed out.\n");
3434 r = -ETIMEDOUT;
3435 goto free_ib;
3436 }
3437 r = 0;
3438 for (i = 0; i < rdev->usec_timeout; i++) {
3439 tmp = RREG32(scratch);
3440 if (tmp == 0xDEADBEEF)
3441 break;
3442 udelay(1);
3443 }
3444 if (i < rdev->usec_timeout) {
3445 DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
3446 } else {
3447 DRM_ERROR("radeon: ib test failed (scratch(0x%04X)=0x%08X)\n",
3448 scratch, tmp);
3449 r = -EINVAL;
3450 }
3451free_ib:
3452 radeon_ib_free(rdev, &ib);
3453free_scratch:
3454 radeon_scratch_free(rdev, scratch);
3455 return r;
3456}
3457
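/*
 * Interrupts
 *
 * Interrupts use a ring buffer on r6xx/r7xx hardware.  It works pretty
 * much like the CP ring.  The ring buffer is mapped into GPU addressable
 * space on the host; the GPU writes interrupt vectors into it and the
 * host reads the vectors out, then writes the rptr back to acknowledge
 * the interrupts it has processed.
 */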
3469void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size)
3470{
3471 u32 rb_bufsz;
3472
3473
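 /* Align ring size */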
3474 rb_bufsz = order_base_2(ring_size / 4);
3475 ring_size = (1 << rb_bufsz) * 4;
3476 rdev->ih.ring_size = ring_size;
3477 rdev->ih.ptr_mask = rdev->ih.ring_size - 1;
3478 rdev->ih.rptr = 0;
3479}
3480
3481int r600_ih_ring_alloc(struct radeon_device *rdev)
3482{
3483 int r;
3484
3485
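 /* Allocate ring buffer */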
3486 if (rdev->ih.ring_obj == NULL) {
3487 r = radeon_bo_create(rdev, rdev->ih.ring_size,
3488 PAGE_SIZE, true,
3489 RADEON_GEM_DOMAIN_GTT, 0,
3490 NULL, NULL, &rdev->ih.ring_obj);
3491 if (r) {
3492 DRM_ERROR("radeon: failed to create ih ring buffer (%d).\n", r);
3493 return r;
3494 }
3495 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
3496 if (unlikely(r != 0))
3497 return r;
3498 r = radeon_bo_pin(rdev->ih.ring_obj,
3499 RADEON_GEM_DOMAIN_GTT,
3500 &rdev->ih.gpu_addr);
3501 if (r) {
3502 radeon_bo_unreserve(rdev->ih.ring_obj);
3503 DRM_ERROR("radeon: failed to pin ih ring buffer (%d).\n", r);
3504 return r;
3505 }
3506 r = radeon_bo_kmap(rdev->ih.ring_obj,
3507 (void **)&rdev->ih.ring);
3508 radeon_bo_unreserve(rdev->ih.ring_obj);
3509 if (r) {
3510 DRM_ERROR("radeon: failed to map ih ring buffer (%d).\n", r);
3511 return r;
3512 }
3513 }
3514 return 0;
3515}
3516
3517void r600_ih_ring_fini(struct radeon_device *rdev)
3518{
3519 int r;
3520 if (rdev->ih.ring_obj) {
3521 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
3522 if (likely(r == 0)) {
3523 radeon_bo_kunmap(rdev->ih.ring_obj);
3524 radeon_bo_unpin(rdev->ih.ring_obj);
3525 radeon_bo_unreserve(rdev->ih.ring_obj);
3526 }
3527 radeon_bo_unref(&rdev->ih.ring_obj);
3528 rdev->ih.ring = NULL;
3529 rdev->ih.ring_obj = NULL;
3530 }
3531}
3532
3533void r600_rlc_stop(struct radeon_device *rdev)
3534{
3535
3536 if ((rdev->family >= CHIP_RV770) &&
3537 (rdev->family <= CHIP_RV740)) {
3538
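  /* r7xx asics need to soft reset RLC before halting */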
3539 WREG32(SRBM_SOFT_RESET, SOFT_RESET_RLC);
3540 RREG32(SRBM_SOFT_RESET);
3541 mdelay(15);
3542 WREG32(SRBM_SOFT_RESET, 0);
3543 RREG32(SRBM_SOFT_RESET);
3544 }
3545
3546 WREG32(RLC_CNTL, 0);
3547}
3548
3549static void r600_rlc_start(struct radeon_device *rdev)
3550{
3551 WREG32(RLC_CNTL, RLC_ENABLE);
3552}
3553
3554static int r600_rlc_resume(struct radeon_device *rdev)
3555{
3556 u32 i;
3557 const __be32 *fw_data;
3558
3559 if (!rdev->rlc_fw)
3560 return -EINVAL;
3561
3562 r600_rlc_stop(rdev);
3563
3564 WREG32(RLC_HB_CNTL, 0);
3565
3566 WREG32(RLC_HB_BASE, 0);
3567 WREG32(RLC_HB_RPTR, 0);
3568 WREG32(RLC_HB_WPTR, 0);
3569 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
3570 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
3571 WREG32(RLC_MC_CNTL, 0);
3572 WREG32(RLC_UCODE_CNTL, 0);
3573
3574 fw_data = (const __be32 *)rdev->rlc_fw->data;
3575 if (rdev->family >= CHIP_RV770) {
3576 for (i = 0; i < R700_RLC_UCODE_SIZE; i++) {
3577 WREG32(RLC_UCODE_ADDR, i);
3578 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
3579 }
3580 } else {
3581 for (i = 0; i < R600_RLC_UCODE_SIZE; i++) {
3582 WREG32(RLC_UCODE_ADDR, i);
3583 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
3584 }
3585 }
3586 WREG32(RLC_UCODE_ADDR, 0);
3587
3588 r600_rlc_start(rdev);
3589
3590 return 0;
3591}
3592
3593static void r600_enable_interrupts(struct radeon_device *rdev)
3594{
3595 u32 ih_cntl = RREG32(IH_CNTL);
3596 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
3597
3598 ih_cntl |= ENABLE_INTR;
3599 ih_rb_cntl |= IH_RB_ENABLE;
3600 WREG32(IH_CNTL, ih_cntl);
3601 WREG32(IH_RB_CNTL, ih_rb_cntl);
3602 rdev->ih.enabled = true;
3603}
3604
3605void r600_disable_interrupts(struct radeon_device *rdev)
3606{
3607 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
3608 u32 ih_cntl = RREG32(IH_CNTL);
3609
3610 ih_rb_cntl &= ~IH_RB_ENABLE;
3611 ih_cntl &= ~ENABLE_INTR;
3612 WREG32(IH_RB_CNTL, ih_rb_cntl);
3613 WREG32(IH_CNTL, ih_cntl);
3614
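 /* set rptr, wptr to 0 */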
3615 WREG32(IH_RB_RPTR, 0);
3616 WREG32(IH_RB_WPTR, 0);
3617 rdev->ih.enabled = false;
3618 rdev->ih.rptr = 0;
3619}
3620
3621static void r600_disable_interrupt_state(struct radeon_device *rdev)
3622{
3623 u32 tmp;
3624
3625 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3626 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3627 WREG32(DMA_CNTL, tmp);
3628 WREG32(GRBM_INT_CNTL, 0);
3629 WREG32(DxMODE_INT_MASK, 0);
3630 WREG32(D1GRPH_INTERRUPT_CONTROL, 0);
3631 WREG32(D2GRPH_INTERRUPT_CONTROL, 0);
3632 if (ASIC_IS_DCE3(rdev)) {
3633 WREG32(DCE3_DACA_AUTODETECT_INT_CONTROL, 0);
3634 WREG32(DCE3_DACB_AUTODETECT_INT_CONTROL, 0);
3635 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3636 WREG32(DC_HPD1_INT_CONTROL, tmp);
3637 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3638 WREG32(DC_HPD2_INT_CONTROL, tmp);
3639 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3640 WREG32(DC_HPD3_INT_CONTROL, tmp);
3641 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3642 WREG32(DC_HPD4_INT_CONTROL, tmp);
3643 if (ASIC_IS_DCE32(rdev)) {
3644 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3645 WREG32(DC_HPD5_INT_CONTROL, tmp);
3646 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3647 WREG32(DC_HPD6_INT_CONTROL, tmp);
3648 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3649 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, tmp);
3650 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3651 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, tmp);
3652 } else {
3653 tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3654 WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
3655 tmp = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3656 WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, tmp);
3657 }
3658 } else {
3659 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
3660 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
3661 tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3662 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
3663 tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3664 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
3665 tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3666 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
3667 tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3668 WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
3669 tmp = RREG32(HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3670 WREG32(HDMI1_AUDIO_PACKET_CONTROL, tmp);
3671 }
3672}
3673
3674int r600_irq_init(struct radeon_device *rdev)
3675{
3676 int ret = 0;
3677 int rb_bufsz;
3678 u32 interrupt_cntl, ih_cntl, ih_rb_cntl;
3679
3680
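 /* allocate ring */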
3681 ret = r600_ih_ring_alloc(rdev);
3682 if (ret)
3683 return ret;
3684
3685
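 /* disable irqs */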
3686 r600_disable_interrupts(rdev);
3687
3688
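 /* init rlc */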
3689 if (rdev->family >= CHIP_CEDAR)
3690 ret = evergreen_rlc_resume(rdev);
3691 else
3692 ret = r600_rlc_resume(rdev);
3693 if (ret) {
3694 r600_ih_ring_fini(rdev);
3695 return ret;
3696 }
3697
3698
3699
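 /* setup interrupt control */
 /* set dummy read address to the dummy page address */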
3700 WREG32(INTERRUPT_CNTL2, rdev->dummy_page.addr >> 8);
3701 interrupt_cntl = RREG32(INTERRUPT_CNTL);
3702
3703
3704
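 /* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
  * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
  */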
3705 interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
3706
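 /* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */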
3707 interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
3708 WREG32(INTERRUPT_CNTL, interrupt_cntl);
3709
3710 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
3711 rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
3712
3713 ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
3714 IH_WPTR_OVERFLOW_CLEAR |
3715 (rb_bufsz << 1));
3716
3717 if (rdev->wb.enabled)
3718 ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;
3719
3720
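 /* set the writeback address whether it's enabled or not */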
3721 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
3722 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
3723
3724 WREG32(IH_RB_CNTL, ih_rb_cntl);
3725
3726
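 /* set rptr, wptr to 0 */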
3727 WREG32(IH_RB_RPTR, 0);
3728 WREG32(IH_RB_WPTR, 0);
3729
3730
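 /* Default settings for IH_CNTL (disabled at first) */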
3731 ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10);
3732
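 /* RPTR_REARM only works if msi's are enabled */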
3733 if (rdev->msi_enabled)
3734 ih_cntl |= RPTR_REARM;
3735 WREG32(IH_CNTL, ih_cntl);
3736
3737
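 /* force the active interrupt state to all disabled */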
3738 if (rdev->family >= CHIP_CEDAR)
3739 evergreen_disable_interrupt_state(rdev);
3740 else
3741 r600_disable_interrupt_state(rdev);
3742
3743
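 /* at this point everything should be setup correctly to enable master */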
3744 pci_set_master(rdev->pdev);
3745
3746
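 /* enable irqs */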
3747 r600_enable_interrupts(rdev);
3748
3749 return ret;
3750}
3751
3752void r600_irq_suspend(struct radeon_device *rdev)
3753{
3754 r600_irq_disable(rdev);
3755 r600_rlc_stop(rdev);
3756}
3757
3758void r600_irq_fini(struct radeon_device *rdev)
3759{
3760 r600_irq_suspend(rdev);
3761 r600_ih_ring_fini(rdev);
3762}
3763
3764int r600_irq_set(struct radeon_device *rdev)
3765{
3766 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
3767 u32 mode_int = 0;
3768 u32 hpd1, hpd2, hpd3, hpd4 = 0, hpd5 = 0, hpd6 = 0;
3769 u32 grbm_int_cntl = 0;
3770 u32 hdmi0, hdmi1;
3771 u32 dma_cntl;
3772 u32 thermal_int = 0;
3773
3774 if (!rdev->irq.installed) {
3775 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
3776 return -EINVAL;
3777 }
3778
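 /* don't enable anything if the ih is disabled */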
3779 if (!rdev->ih.enabled) {
3780 r600_disable_interrupts(rdev);
3781
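  /* force the active interrupt state to all disabled */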
3782 r600_disable_interrupt_state(rdev);
3783 return 0;
3784 }
3785
3786 if (ASIC_IS_DCE3(rdev)) {
3787 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
3788 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
3789 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
3790 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
3791 if (ASIC_IS_DCE32(rdev)) {
3792 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
3793 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
3794 hdmi0 = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3795 hdmi1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3796 } else {
3797 hdmi0 = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3798 hdmi1 = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3799 }
3800 } else {
3801 hpd1 = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & ~DC_HPDx_INT_EN;
3802 hpd2 = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & ~DC_HPDx_INT_EN;
3803 hpd3 = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & ~DC_HPDx_INT_EN;
3804 hdmi0 = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3805 hdmi1 = RREG32(HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3806 }
3807
3808 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3809
3810 if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) {
3811 thermal_int = RREG32(CG_THERMAL_INT) &
3812 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
3813 } else if (rdev->family >= CHIP_RV770) {
3814 thermal_int = RREG32(RV770_CG_THERMAL_INT) &
3815 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
3816 }
3817 if (rdev->irq.dpm_thermal) {
3818 DRM_DEBUG("dpm thermal\n");
3819 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
3820 }
3821
3822 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
3823 DRM_DEBUG("r600_irq_set: sw int\n");
3824 cp_int_cntl |= RB_INT_ENABLE;
3825 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
3826 }
3827
3828 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
3829 DRM_DEBUG("r600_irq_set: sw int dma\n");
3830 dma_cntl |= TRAP_ENABLE;
3831 }
3832
3833 if (rdev->irq.crtc_vblank_int[0] ||
3834 atomic_read(&rdev->irq.pflip[0])) {
3835 DRM_DEBUG("r600_irq_set: vblank 0\n");
3836 mode_int |= D1MODE_VBLANK_INT_MASK;
3837 }
3838 if (rdev->irq.crtc_vblank_int[1] ||
3839 atomic_read(&rdev->irq.pflip[1])) {
3840 DRM_DEBUG("r600_irq_set: vblank 1\n");
3841 mode_int |= D2MODE_VBLANK_INT_MASK;
3842 }
3843 if (rdev->irq.hpd[0]) {
3844 DRM_DEBUG("r600_irq_set: hpd 1\n");
3845 hpd1 |= DC_HPDx_INT_EN;
3846 }
3847 if (rdev->irq.hpd[1]) {
3848 DRM_DEBUG("r600_irq_set: hpd 2\n");
3849 hpd2 |= DC_HPDx_INT_EN;
3850 }
3851 if (rdev->irq.hpd[2]) {
3852 DRM_DEBUG("r600_irq_set: hpd 3\n");
3853 hpd3 |= DC_HPDx_INT_EN;
3854 }
3855 if (rdev->irq.hpd[3]) {
3856 DRM_DEBUG("r600_irq_set: hpd 4\n");
3857 hpd4 |= DC_HPDx_INT_EN;
3858 }
3859 if (rdev->irq.hpd[4]) {
3860 DRM_DEBUG("r600_irq_set: hpd 5\n");
3861 hpd5 |= DC_HPDx_INT_EN;
3862 }
3863 if (rdev->irq.hpd[5]) {
3864 DRM_DEBUG("r600_irq_set: hpd 6\n");
3865 hpd6 |= DC_HPDx_INT_EN;
3866 }
3867 if (rdev->irq.afmt[0]) {
3868 DRM_DEBUG("r600_irq_set: hdmi 0\n");
3869 hdmi0 |= HDMI0_AZ_FORMAT_WTRIG_MASK;
3870 }
3871 if (rdev->irq.afmt[1]) {
3872 DRM_DEBUG("r600_irq_set: hdmi 0\n");
3873 hdmi1 |= HDMI0_AZ_FORMAT_WTRIG_MASK;
3874 }
3875
3876 WREG32(CP_INT_CNTL, cp_int_cntl);
3877 WREG32(DMA_CNTL, dma_cntl);
3878 WREG32(DxMODE_INT_MASK, mode_int);
3879 WREG32(D1GRPH_INTERRUPT_CONTROL, DxGRPH_PFLIP_INT_MASK);
3880 WREG32(D2GRPH_INTERRUPT_CONTROL, DxGRPH_PFLIP_INT_MASK);
3881 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
3882 if (ASIC_IS_DCE3(rdev)) {
3883 WREG32(DC_HPD1_INT_CONTROL, hpd1);
3884 WREG32(DC_HPD2_INT_CONTROL, hpd2);
3885 WREG32(DC_HPD3_INT_CONTROL, hpd3);
3886 WREG32(DC_HPD4_INT_CONTROL, hpd4);
3887 if (ASIC_IS_DCE32(rdev)) {
3888 WREG32(DC_HPD5_INT_CONTROL, hpd5);
3889 WREG32(DC_HPD6_INT_CONTROL, hpd6);
3890 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, hdmi0);
3891 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, hdmi1);
3892 } else {
3893 WREG32(HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
3894 WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, hdmi1);
3895 }
3896 } else {
3897 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, hpd1);
3898 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, hpd2);
3899 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, hpd3);
3900 WREG32(HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
3901 WREG32(HDMI1_AUDIO_PACKET_CONTROL, hdmi1);
3902 }
3903 if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) {
3904 WREG32(CG_THERMAL_INT, thermal_int);
3905 } else if (rdev->family >= CHIP_RV770) {
3906 WREG32(RV770_CG_THERMAL_INT, thermal_int);
3907 }
3908
3909
3910 RREG32(R_000E50_SRBM_STATUS);
3911
3912 return 0;
3913}
3914
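/* r600_irq_ack - snapshot the display/HPD/HDMI interrupt status registers
 * into rdev->irq.stat_regs and write back the corresponding ack bits so
 * the sources can latch new events.
 */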
static void r600_irq_ack(struct radeon_device *rdev)
{
	u32 tmp;

	if (ASIC_IS_DCE3(rdev)) {
		rdev->irq.stat_regs.r600.disp_int = RREG32(DCE3_DISP_INTERRUPT_STATUS);
		rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE);
		rdev->irq.stat_regs.r600.disp_int_cont2 = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE2);
		if (ASIC_IS_DCE32(rdev)) {
			rdev->irq.stat_regs.r600.hdmi0_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET0);
			rdev->irq.stat_regs.r600.hdmi1_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET1);
		} else {
			rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
			rdev->irq.stat_regs.r600.hdmi1_status = RREG32(DCE3_HDMI1_STATUS);
		}
	} else {
		rdev->irq.stat_regs.r600.disp_int = RREG32(DISP_INTERRUPT_STATUS);
		rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
		rdev->irq.stat_regs.r600.disp_int_cont2 = 0;
		rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
		rdev->irq.stat_regs.r600.hdmi1_status = RREG32(HDMI1_STATUS);
	}
	rdev->irq.stat_regs.r600.d1grph_int = RREG32(D1GRPH_INTERRUPT_STATUS);
	rdev->irq.stat_regs.r600.d2grph_int = RREG32(D2GRPH_INTERRUPT_STATUS);

	if (rdev->irq.stat_regs.r600.d1grph_int & DxGRPH_PFLIP_INT_OCCURRED)
		WREG32(D1GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.r600.d2grph_int & DxGRPH_PFLIP_INT_OCCURRED)
		WREG32(D2GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(D1MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(D1MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT)
		WREG32(D2MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT)
		WREG32(D2MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD1_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD1_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD2_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD2_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD3_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD3_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (ASIC_IS_DCE32(rdev)) {
		if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) {
			tmp = RREG32(DC_HPD5_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD5_INT_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) {
			tmp = RREG32(DC_HPD6_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD6_INT_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi0_status & AFMT_AZ_FORMAT_WTRIG) {
			tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0);
			tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi1_status & AFMT_AZ_FORMAT_WTRIG) {
			tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1);
			tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, tmp);
		}
	} else {
		if (rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG) {
			tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL);
			tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
			WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG) {
			if (ASIC_IS_DCE3(rdev)) {
				tmp = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL);
				tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
				WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, tmp);
			} else {
				tmp = RREG32(HDMI1_AUDIO_PACKET_CONTROL);
				tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
				WREG32(HDMI1_AUDIO_PACKET_CONTROL, tmp);
			}
		}
	}
}

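/* r600_irq_disable - mask all interrupt sources and ack anything pending */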
void r600_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	r600_irq_ack(rdev);
	r600_disable_interrupt_state(rdev);
}

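/* r600_get_ih_wptr - read the current IH ring write pointer, preferring
 * the writeback copy when enabled, and recover from ring overflow.
 */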
static u32 r600_get_ih_wptr(struct radeon_device *rdev)
{
	u32 wptr, tmp;

	if (rdev->wb.enabled)
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
	else
		wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
		wptr &= ~RB_OVERFLOW;
		/* When a ring buffer overflow happens, start parsing from the
		 * last not overwritten vector (wptr + 16).  Hopefully this
		 * allows us to catch up.
		 */
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
	}
	return (wptr & rdev->ih.ptr_mask);
}

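/*        r600 IV Ring
 * Each IV ring entry is 128 bits:
 * [7:0]    - interrupt source id
 * [31:8]   - reserved
 * [59:32]  - interrupt source data
 * [127:60] - reserved
 *
 * The basic interrupt vector entries, as decoded by
 * r600_irq_process() below (src_id / src_data):
 *      1  0             D1 vblank
 *      1  1             D1 vline
 *      5  0             D2 vblank
 *      5  1             D2 vline
 *      9  -             D1 page flip
 *     11  -             D2 page flip
 *     19  0/1/4/5/10/12 HPD1-HPD6 hotplug
 *     21  4             HDMI0 audio format change
 *     21  5             HDMI1 audio format change
 *    124  -             UVD
 *    176-178            CP interrupts (RB, IB1, IB2)
 *    181  -             CP EOP
 *    224  -             DMA trap
 *    230/231            thermal low-to-high / high-to-low
 *    233  -             GUI idle
 */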
int r600_irq_process(struct radeon_device *rdev)
{
	u32 wptr;
	u32 rptr;
	u32 src_id, src_data;
	u32 ring_index;
	bool queue_hotplug = false;
	bool queue_hdmi = false;
	bool queue_thermal = false;

	if (!rdev->ih.enabled || rdev->shutdown)
		return IRQ_NONE;

	/* No MSIs, need a dummy read to flush PCI DMAs */
	if (!rdev->msi_enabled)
		RREG32(IH_RB_WPTR);

	wptr = r600_get_ih_wptr(rdev);

restart_ih:
	/* is somebody else already processing irqs? */
	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;

	rptr = rdev->ih.rptr;
	DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);

	/* Order reading of wptr vs. reading of IH ring data */
	rmb();

	/* display interrupts */
	r600_irq_ack(rdev);

	while (rptr != wptr) {
		/* wptr/rptr are in bytes! */
		ring_index = rptr / 4;
		src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;

		switch (src_id) {
		case 1: /* D1 vblank/vline */
			switch (src_data) {
			case 0: /* D1 vblank */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[0]) {
					drm_handle_vblank(rdev->ddev, 0);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[0]))
					radeon_crtc_handle_vblank(rdev, 0);
				rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D1 vblank\n");
				break;
			case 1: /* D1 vline */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D1 vline\n");
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 5: /* D2 vblank/vline */
			switch (src_data) {
			case 0: /* D2 vblank */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[1]) {
					drm_handle_vblank(rdev->ddev, 1);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[1]))
					radeon_crtc_handle_vblank(rdev, 1);
				rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D2 vblank\n");
				break;
			case 1: /* D2 vline */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D2 vline\n");
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 9: /* D1 pflip */
			DRM_DEBUG("IH: D1 flip\n");
			if (radeon_use_pflipirq > 0)
				radeon_crtc_handle_flip(rdev, 0);
			break;
		case 11: /* D2 pflip */
			DRM_DEBUG("IH: D2 flip\n");
			if (radeon_use_pflipirq > 0)
				radeon_crtc_handle_flip(rdev, 1);
			break;
		case 19: /* HPD hotplug */
			switch (src_data) {
			case 0: /* HPD1 */
				if (!(rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT))
					DRM_DEBUG("IH: HPD1 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD1_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD1\n");
				break;
			case 1: /* HPD2 */
				if (!(rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT))
					DRM_DEBUG("IH: HPD2 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD2_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD2\n");
				break;
			case 4: /* HPD3 */
				if (!(rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT))
					DRM_DEBUG("IH: HPD3 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD3_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD3\n");
				break;
			case 5: /* HPD4 */
				if (!(rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT))
					DRM_DEBUG("IH: HPD4 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD4_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD4\n");
				break;
			case 10: /* HPD5 */
				if (!(rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT))
					DRM_DEBUG("IH: HPD5 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD5_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD5\n");
				break;
			case 12: /* HPD6 */
				if (!(rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT))
					DRM_DEBUG("IH: HPD6 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD6_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD6\n");
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 21: /* hdmi */
			switch (src_data) {
			case 4: /* HDMI0 */
				if (!(rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: HDMI0 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.hdmi0_status &= ~HDMI0_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI0\n");
				break;
			case 5: /* HDMI1 */
				if (!(rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: HDMI1 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.hdmi1_status &= ~HDMI0_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI1\n");
				break;
			default:
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 124: /* UVD */
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
			break;
		case 176: /* CP_INT in ring buffer */
		case 177: /* CP_INT in IB1 */
		case 178: /* CP_INT in IB2 */
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 181: /* CP EOP event */
			DRM_DEBUG("IH: CP EOP\n");
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 224: /* DMA trap event */
			DRM_DEBUG("IH: DMA trap\n");
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
			break;
		case 230: /* thermal low to high */
			DRM_DEBUG("IH: thermal low to high\n");
			rdev->pm.dpm.thermal.high_to_low = false;
			queue_thermal = true;
			break;
		case 231: /* thermal high to low */
			DRM_DEBUG("IH: thermal high to low\n");
			rdev->pm.dpm.thermal.high_to_low = true;
			queue_thermal = true;
			break;
		case 233: /* GUI idle */
			DRM_DEBUG("IH: GUI idle\n");
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}

		/* wptr/rptr are in bytes! */
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
		WREG32(IH_RB_RPTR, rptr);
	}
	if (queue_hotplug)
		schedule_delayed_work(&rdev->hotplug_work, 0);
	if (queue_hdmi)
		schedule_work(&rdev->audio_work);
	if (queue_thermal && rdev->pm.dpm_enabled)
		schedule_work(&rdev->pm.dpm.thermal.work);
	rdev->ih.rptr = rptr;
	atomic_set(&rdev->ih.lock, 0);

	/* make sure wptr hasn't changed while processing */
	wptr = r600_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}

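/*
 * Debugfs info
 */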
#if defined(CONFIG_DEBUG_FS)

static int r600_debugfs_mc_info(struct seq_file *m, void *data)
{
	struct drm_info_node *node = (struct drm_info_node *) m->private;
	struct drm_device *dev = node->minor->dev;
	struct radeon_device *rdev = dev->dev_private;

	DREG32_SYS(m, rdev, R_000E50_SRBM_STATUS);
	DREG32_SYS(m, rdev, VM_L2_STATUS);
	return 0;
}

static struct drm_info_list r600_mc_info_list[] = {
	{"r600_mc_info", r600_debugfs_mc_info, 0, NULL},
};
#endif

int r600_debugfs_mc_info_init(struct radeon_device *rdev)
{
#if defined(CONFIG_DEBUG_FS)
	return radeon_debugfs_add_files(rdev, r600_mc_info_list, ARRAY_SIZE(r600_mc_info_list));
#else
	return 0;
#endif
}

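/**
 * r600_mmio_hdp_flush - flush Host Data Path cache via MMIO
 * @rdev: radeon_device pointer
 *
 * Make sure CPU writes to the frame buffer are visible to the GPU
 * by flushing the HDP cache through MMIO register writes.
 */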
void r600_mmio_hdp_flush(struct radeon_device *rdev)
{
	/* r7xx hw bug.  write to HDP_DEBUG1 followed by fb read
	 * rather than write to HDP_REG_COHERENCY_FLUSH_CNTL.
	 * This seems to cause problems on some AGP cards, so just use
	 * the old method for them.
	 */
	if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
	    rdev->vram_scratch.ptr && !(rdev->flags & RADEON_IS_AGP)) {
		void __iomem *ptr = (void *)rdev->vram_scratch.ptr;

		WREG32(HDP_DEBUG1, 0);
		readl((void __iomem *)ptr);
	} else
		WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
}

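/* r600_set_pcie_lanes - request a new PCIE link width (dGPUs only) */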
void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes)
{
	u32 link_width_cntl, mask;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	radeon_gui_idle(rdev);

	switch (lanes) {
	case 0:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X0;
		break;
	case 1:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X1;
		break;
	case 2:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X2;
		break;
	case 4:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X4;
		break;
	case 8:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X8;
		break;
	case 12:
		/* not actually supported */
		mask = RADEON_PCIE_LC_LINK_WIDTH_X12;
		break;
	case 16:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X16;
		break;
	default:
		DRM_ERROR("invalid pcie lane request: %d\n", lanes);
		return;
	}

	link_width_cntl = RREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL);
	link_width_cntl &= ~RADEON_PCIE_LC_LINK_WIDTH_MASK;
	link_width_cntl |= mask << RADEON_PCIE_LC_LINK_WIDTH_SHIFT;
	link_width_cntl |= (RADEON_PCIE_LC_RECONFIG_NOW |
			    R600_PCIE_LC_RECONFIG_ARC_MISSING_ESCAPE);

	WREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
}

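/* r600_get_pcie_lanes - report the current PCIE link width, or 0 where
 * it does not apply (IGPs, non-PCIE, X2 boards).
 */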
int r600_get_pcie_lanes(struct radeon_device *rdev)
{
	u32 link_width_cntl;

	if (rdev->flags & RADEON_IS_IGP)
		return 0;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return 0;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return 0;

	radeon_gui_idle(rdev);

	link_width_cntl = RREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL);

	switch ((link_width_cntl & RADEON_PCIE_LC_LINK_WIDTH_RD_MASK) >> RADEON_PCIE_LC_LINK_WIDTH_RD_SHIFT) {
	case RADEON_PCIE_LC_LINK_WIDTH_X1:
		return 1;
	case RADEON_PCIE_LC_LINK_WIDTH_X2:
		return 2;
	case RADEON_PCIE_LC_LINK_WIDTH_X4:
		return 4;
	case RADEON_PCIE_LC_LINK_WIDTH_X8:
		return 8;
	case RADEON_PCIE_LC_LINK_WIDTH_X12:
		/* not actually supported */
		return 12;
	case RADEON_PCIE_LC_LINK_WIDTH_X0:
	case RADEON_PCIE_LC_LINK_WIDTH_X16:
	default:
		return 16;
	}
}

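/* r600_pcie_gen2_enable - bring the PCIE link up to gen2 speeds when the
 * asic, the upstream bridge and the radeon.pcie_gen2 module parameter
 * all allow it.
 */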
static void r600_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, lanes, speed_cntl, training_cntl, tmp;
	u16 link_cntl2;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	/* only RV6xx+ chips are supported */
	if (rdev->family <= CHIP_R600)
		return;

	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
	    (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	/* 55nm r6xx asics */
	if ((rdev->family == CHIP_RV670) ||
	    (rdev->family == CHIP_RV620) ||
	    (rdev->family == CHIP_RV635)) {
		/* advertise upconfig capability */
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		if (link_width_cntl & LC_RENEGOTIATION_SUPPORT) {
			lanes = (link_width_cntl & LC_LINK_WIDTH_RD_MASK) >> LC_LINK_WIDTH_RD_SHIFT;
			link_width_cntl &= ~(LC_LINK_WIDTH_MASK |
					     LC_RECONFIG_ARC_MISSING_ESCAPE);
			link_width_cntl |= lanes | LC_RECONFIG_NOW | LC_RENEGOTIATE_EN;
			WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		} else {
			link_width_cntl |= LC_UPCONFIGURE_DIS;
			WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		}
	}

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		/* 55nm r6xx asics */
		if ((rdev->family == CHIP_RV670) ||
		    (rdev->family == CHIP_RV620) ||
		    (rdev->family == CHIP_RV635)) {
			WREG32(MM_CFGREGS_CNTL, 0x8);
			link_cntl2 = RREG32(0x4088);
			WREG32(MM_CFGREGS_CNTL, 0);
			/* not supported yet */
			if (link_cntl2 & SELECTABLE_DEEMPHASIS)
				return;
		}

		speed_cntl &= ~LC_SPEED_CHANGE_ATTEMPTS_ALLOWED_MASK;
		speed_cntl |= (0x3 << LC_SPEED_CHANGE_ATTEMPTS_ALLOWED_SHIFT);
		speed_cntl &= ~LC_VOLTAGE_TIMER_SEL_MASK;
		speed_cntl &= ~LC_FORCE_DIS_HW_SPEED_CHANGE;
		speed_cntl |= LC_FORCE_EN_HW_SPEED_CHANGE;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		tmp = RREG32(0x541c);
		WREG32(0x541c, tmp | 0x8);
		WREG32(MM_CFGREGS_CNTL, MM_WR_TO_CFG_EN);
		link_cntl2 = RREG16(0x4088);
		link_cntl2 &= ~TARGET_LINK_SPEED_MASK;
		link_cntl2 |= 0x2;
		WREG16(0x4088, link_cntl2);
		WREG32(MM_CFGREGS_CNTL, 0);

		if ((rdev->family == CHIP_RV670) ||
		    (rdev->family == CHIP_RV620) ||
		    (rdev->family == CHIP_RV635)) {
			training_cntl = RREG32_PCIE_PORT(PCIE_LC_TRAINING_CNTL);
			training_cntl &= ~LC_POINT_7_PLUS_EN;
			WREG32_PCIE_PORT(PCIE_LC_TRAINING_CNTL, training_cntl);
		} else {
			speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
			speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
		}

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		link_width_cntl |= LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}

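/**
 * r600_get_gpu_clock_counter - return GPU clock counter snapshot
 * @rdev: radeon_device pointer
 *
 * Fetches a GPU clock counter snapshot.
 * Returns the 64 bit clock counter snapshot.
 */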
uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev)
{
	uint64_t clock;

	mutex_lock(&rdev->gpu_clock_mutex);
	WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
	clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
		((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
	mutex_unlock(&rdev->gpu_clock_mutex);
	return clock;
}