/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */

#include <linux/firmware.h>
#include <linux/module.h>
#include <linux/pci.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include <drm/drm_device.h>
#include <drm/drm_vblank.h>
#include <drm/radeon_drm.h>

#include "atom.h"
#include "avivod.h"
#include "evergreen.h"
#include "r600.h"
#include "r600d.h"
#include "rv770.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_audio.h"
#include "radeon_mode.h"
#include "radeon_ucode.h"

/* Firmware Names */
MODULE_FIRMWARE("radeon/R600_pfp.bin");
MODULE_FIRMWARE("radeon/R600_me.bin");
MODULE_FIRMWARE("radeon/RV610_pfp.bin");
MODULE_FIRMWARE("radeon/RV610_me.bin");
MODULE_FIRMWARE("radeon/RV630_pfp.bin");
MODULE_FIRMWARE("radeon/RV630_me.bin");
MODULE_FIRMWARE("radeon/RV620_pfp.bin");
MODULE_FIRMWARE("radeon/RV620_me.bin");
MODULE_FIRMWARE("radeon/RV635_pfp.bin");
MODULE_FIRMWARE("radeon/RV635_me.bin");
MODULE_FIRMWARE("radeon/RV670_pfp.bin");
MODULE_FIRMWARE("radeon/RV670_me.bin");
MODULE_FIRMWARE("radeon/RS780_pfp.bin");
MODULE_FIRMWARE("radeon/RS780_me.bin");
MODULE_FIRMWARE("radeon/RV770_pfp.bin");
MODULE_FIRMWARE("radeon/RV770_me.bin");
MODULE_FIRMWARE("radeon/RV770_smc.bin");
MODULE_FIRMWARE("radeon/RV730_pfp.bin");
MODULE_FIRMWARE("radeon/RV730_me.bin");
MODULE_FIRMWARE("radeon/RV730_smc.bin");
MODULE_FIRMWARE("radeon/RV740_smc.bin");
MODULE_FIRMWARE("radeon/RV710_pfp.bin");
MODULE_FIRMWARE("radeon/RV710_me.bin");
MODULE_FIRMWARE("radeon/RV710_smc.bin");
MODULE_FIRMWARE("radeon/R600_rlc.bin");
MODULE_FIRMWARE("radeon/R700_rlc.bin");
MODULE_FIRMWARE("radeon/CEDAR_pfp.bin");
MODULE_FIRMWARE("radeon/CEDAR_me.bin");
MODULE_FIRMWARE("radeon/CEDAR_rlc.bin");
MODULE_FIRMWARE("radeon/CEDAR_smc.bin");
MODULE_FIRMWARE("radeon/REDWOOD_pfp.bin");
MODULE_FIRMWARE("radeon/REDWOOD_me.bin");
MODULE_FIRMWARE("radeon/REDWOOD_rlc.bin");
MODULE_FIRMWARE("radeon/REDWOOD_smc.bin");
MODULE_FIRMWARE("radeon/JUNIPER_pfp.bin");
MODULE_FIRMWARE("radeon/JUNIPER_me.bin");
MODULE_FIRMWARE("radeon/JUNIPER_rlc.bin");
MODULE_FIRMWARE("radeon/JUNIPER_smc.bin");
MODULE_FIRMWARE("radeon/CYPRESS_pfp.bin");
MODULE_FIRMWARE("radeon/CYPRESS_me.bin");
MODULE_FIRMWARE("radeon/CYPRESS_rlc.bin");
MODULE_FIRMWARE("radeon/CYPRESS_smc.bin");
MODULE_FIRMWARE("radeon/PALM_pfp.bin");
MODULE_FIRMWARE("radeon/PALM_me.bin");
MODULE_FIRMWARE("radeon/SUMO_rlc.bin");
MODULE_FIRMWARE("radeon/SUMO_pfp.bin");
MODULE_FIRMWARE("radeon/SUMO_me.bin");
MODULE_FIRMWARE("radeon/SUMO2_pfp.bin");
MODULE_FIRMWARE("radeon/SUMO2_me.bin");

static const u32 crtc_offsets[2] =
{
	0,
	AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL
};

static void r600_debugfs_mc_info_init(struct radeon_device *rdev);

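/* r600,rv610,rv630,rv620,rv635,rv670 */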
int r600_mc_wait_for_idle(struct radeon_device *rdev);
static void r600_gpu_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
void r600_irq_disable(struct radeon_device *rdev);
static void r600_pcie_gen2_enable(struct radeon_device *rdev);

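/*
 * Indirect registers accessor
 */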
u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->rcu_idx_lock, flags);
	WREG32(R600_RCU_INDEX, ((reg) & 0x1fff));
	r = RREG32(R600_RCU_DATA);
	spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags);
	return r;
}

void r600_rcu_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->rcu_idx_lock, flags);
	WREG32(R600_RCU_INDEX, ((reg) & 0x1fff));
	WREG32(R600_RCU_DATA, (v));
	spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags);
}

u32 r600_uvd_ctx_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->uvd_idx_lock, flags);
	WREG32(R600_UVD_CTX_INDEX, ((reg) & 0x1ff));
	r = RREG32(R600_UVD_CTX_DATA);
	spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags);
	return r;
}

void r600_uvd_ctx_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->uvd_idx_lock, flags);
	WREG32(R600_UVD_CTX_INDEX, ((reg) & 0x1ff));
	WREG32(R600_UVD_CTX_DATA, (v));
	spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags);
}

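/**
 * r600_get_allowed_info_register - fetch the register for the info ioctl
 *
 * @rdev: radeon_device pointer
 * @reg: register offset in bytes
 * @val: register value
 *
 * Returns 0 for success or -EINVAL for an invalid register
 */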
int r600_get_allowed_info_register(struct radeon_device *rdev,
				   u32 reg, u32 *val)
{
	switch (reg) {
	case GRBM_STATUS:
	case GRBM_STATUS2:
	case R_000E50_SRBM_STATUS:
	case DMA_STATUS_REG:
	case UVD_STATUS:
		*val = RREG32(reg);
		return 0;
	default:
		return -EINVAL;
	}
}

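/**
 * r600_get_xclk - get the xclk
 *
 * @rdev: radeon_device pointer
 *
 * Returns the reference clock used by the gfx engine
 * (r6xx, IGPs, APUs).
 */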
u32 r600_get_xclk(struct radeon_device *rdev)
{
	return rdev->clock.spll.reference_freq;
}

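/* Program the UVD PLL (UPLL): route VCLK/DCLK to the bypass clock, compute
 * the feedback and post dividers, pulse the PLL reset, program the dividers,
 * and finally switch VCLK/DCLK over to the new PLL output.
 */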
int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	unsigned fb_div = 0, ref_div, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* assert BYPASS_EN, deassert UPLL_RESET, UPLL_SLEEP and UPLL_CTLREQ */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~(
		 UPLL_RESET_MASK | UPLL_SLEEP_MASK | UPLL_CTLREQ_MASK));

	if (rdev->family >= CHIP_RS780)
		WREG32_P(GFX_MACRO_BYPASS_CNTL, UPLL_BYPASS_CNTL,
			 ~UPLL_BYPASS_CNTL);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	if (rdev->clock.spll.reference_freq == 10000)
		ref_div = 34;
	else
		ref_div = 4;

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000,
					  ref_div + 1, 0xFFF, 2, 30, ~0,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	if (rdev->family >= CHIP_RV670 && rdev->family < CHIP_RS780)
		fb_div >>= 1;
	else
		fb_div |= 1;

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* For RS780 we have to choose ref clk */
	if (rdev->family >= CHIP_RS780)
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_REFCLK_SRC_SEL_MASK,
			 ~UPLL_REFCLK_SRC_SEL_MASK);

	/* set the required fb, ref and post divider values */
	WREG32_P(CG_UPLL_FUNC_CNTL,
		 UPLL_FB_DIV(fb_div) |
		 UPLL_REF_DIV(ref_div),
		 ~(UPLL_FB_DIV_MASK | UPLL_REF_DIV_MASK));
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 UPLL_SW_HILEN(vclk_div >> 1) |
		 UPLL_SW_LOLEN((vclk_div >> 1) + (vclk_div & 1)) |
		 UPLL_SW_HILEN2(dclk_div >> 1) |
		 UPLL_SW_LOLEN2((dclk_div >> 1) + (dclk_div & 1)) |
		 UPLL_DIVEN_MASK | UPLL_DIVEN2_MASK,
		 ~UPLL_SW_MASK);

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* deassert BYPASS EN */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	if (rdev->family >= CHIP_RS780)
		WREG32_P(GFX_MACRO_BYPASS_CNTL, 0, ~UPLL_BYPASS_CNTL);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}

void dce3_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= FMT_SPATIAL_DITHER_EN;
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}
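/* get temperature in millidegrees */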
int rv6xx_get_temp(struct radeon_device *rdev)
{
	u32 temp = (RREG32(CG_THERMAL_STATUS) & ASIC_T_MASK) >>
		ASIC_T_SHIFT;
	int actual_temp = temp & 0xff;

	if (temp & 0x100)
		actual_temp -= 256;

	return actual_temp * 1000;
}

void r600_pm_get_dynpm_state(struct radeon_device *rdev)
{
	int i;

	rdev->pm.dynpm_can_upclock = true;
	rdev->pm.dynpm_can_downclock = true;

	/* power state array is low to high, default is first (0) */
	if ((rdev->flags & RADEON_IS_IGP) || (rdev->family == CHIP_R600)) {
		int min_power_state_index = 0;

		if (rdev->pm.num_power_states > 2)
			min_power_state_index = 1;

		switch (rdev->pm.dynpm_planned_action) {
		case DYNPM_ACTION_MINIMUM:
			rdev->pm.requested_power_state_index = min_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_downclock = false;
			break;
		case DYNPM_ACTION_DOWNCLOCK:
			if (rdev->pm.current_power_state_index == min_power_state_index) {
				rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
				rdev->pm.dynpm_can_downclock = false;
			} else {
				if (rdev->pm.active_crtc_count > 1) {
					for (i = 0; i < rdev->pm.num_power_states; i++) {
						if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
							continue;
						else if (i >= rdev->pm.current_power_state_index) {
							rdev->pm.requested_power_state_index =
								rdev->pm.current_power_state_index;
							break;
						} else {
							rdev->pm.requested_power_state_index = i;
							break;
						}
					}
				} else {
					if (rdev->pm.current_power_state_index == 0)
						rdev->pm.requested_power_state_index =
							rdev->pm.num_power_states - 1;
					else
						rdev->pm.requested_power_state_index =
							rdev->pm.current_power_state_index - 1;
				}
			}
			rdev->pm.requested_clock_mode_index = 0;
			/* don't use the power state if crtcs are active and no display flag is set */
			if ((rdev->pm.active_crtc_count > 0) &&
			    (rdev->pm.power_state[rdev->pm.requested_power_state_index].
			     clock_info[rdev->pm.requested_clock_mode_index].flags &
			     RADEON_PM_MODE_NO_DISPLAY)) {
				rdev->pm.requested_power_state_index++;
			}
			break;
		case DYNPM_ACTION_UPCLOCK:
			if (rdev->pm.current_power_state_index == (rdev->pm.num_power_states - 1)) {
				rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
				rdev->pm.dynpm_can_upclock = false;
			} else {
				if (rdev->pm.active_crtc_count > 1) {
					for (i = (rdev->pm.num_power_states - 1); i >= 0; i--) {
						if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
							continue;
						else if (i <= rdev->pm.current_power_state_index) {
							rdev->pm.requested_power_state_index =
								rdev->pm.current_power_state_index;
							break;
						} else {
							rdev->pm.requested_power_state_index = i;
							break;
						}
					}
				} else
					rdev->pm.requested_power_state_index =
						rdev->pm.current_power_state_index + 1;
			}
			rdev->pm.requested_clock_mode_index = 0;
			break;
		case DYNPM_ACTION_DEFAULT:
			rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_upclock = false;
			break;
		case DYNPM_ACTION_NONE:
		default:
			DRM_ERROR("Requested mode for undefined action\n");
			return;
		}
	} else {
		/* XXX select a power state based on AC/DC, single/dualhead, etc. */
		/* for now just select the first power state and switch between clock modes */
		/* power state array is low to high, default is first (0) */
		if (rdev->pm.active_crtc_count > 1) {
			rdev->pm.requested_power_state_index = -1;
			/* start at 1 as we don't want the default mode */
			for (i = 1; i < rdev->pm.num_power_states; i++) {
				if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
					continue;
				else if ((rdev->pm.power_state[i].type == POWER_STATE_TYPE_PERFORMANCE) ||
					 (rdev->pm.power_state[i].type == POWER_STATE_TYPE_BATTERY)) {
					rdev->pm.requested_power_state_index = i;
					break;
				}
			}
			/* if nothing selected, grab the default state. */
			if (rdev->pm.requested_power_state_index == -1)
				rdev->pm.requested_power_state_index = 0;
		} else
			rdev->pm.requested_power_state_index = 1;

		switch (rdev->pm.dynpm_planned_action) {
		case DYNPM_ACTION_MINIMUM:
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_downclock = false;
			break;
		case DYNPM_ACTION_DOWNCLOCK:
			if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
				if (rdev->pm.current_clock_mode_index == 0) {
					rdev->pm.requested_clock_mode_index = 0;
					rdev->pm.dynpm_can_downclock = false;
				} else
					rdev->pm.requested_clock_mode_index =
						rdev->pm.current_clock_mode_index - 1;
			} else {
				rdev->pm.requested_clock_mode_index = 0;
				rdev->pm.dynpm_can_downclock = false;
			}
			/* don't use the power state if crtcs are active and no display flag is set */
			if ((rdev->pm.active_crtc_count > 0) &&
			    (rdev->pm.power_state[rdev->pm.requested_power_state_index].
			     clock_info[rdev->pm.requested_clock_mode_index].flags &
			     RADEON_PM_MODE_NO_DISPLAY)) {
				rdev->pm.requested_clock_mode_index++;
			}
			break;
		case DYNPM_ACTION_UPCLOCK:
			if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
				if (rdev->pm.current_clock_mode_index ==
				    (rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1)) {
					rdev->pm.requested_clock_mode_index = rdev->pm.current_clock_mode_index;
					rdev->pm.dynpm_can_upclock = false;
				} else
					rdev->pm.requested_clock_mode_index =
						rdev->pm.current_clock_mode_index + 1;
			} else {
				rdev->pm.requested_clock_mode_index =
					rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1;
				rdev->pm.dynpm_can_upclock = false;
			}
			break;
		case DYNPM_ACTION_DEFAULT:
			rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_upclock = false;
			break;
		case DYNPM_ACTION_NONE:
		default:
			DRM_ERROR("Requested mode for undefined action\n");
			return;
		}
	}

	DRM_DEBUG_DRIVER("Requested: e: %d m: %d p: %d\n",
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 clock_info[rdev->pm.requested_clock_mode_index].sclk,
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 clock_info[rdev->pm.requested_clock_mode_index].mclk,
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 pcie_lanes);
}

void rs780_pm_init_profile(struct radeon_device *rdev)
{
	if (rdev->pm.num_power_states == 2) {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else if (rdev->pm.num_power_states == 3) {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 3;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 3;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	}
}

void r600_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	if (rdev->family == CHIP_R600) {
		/* XXX */
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else {
		if (rdev->pm.num_power_states < 4) {
			/* default */
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
			/* low sh */
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
			/* mid sh */
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
			/* high sh */
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
			/* low mh */
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
			/* mid mh */
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
			/* high mh */
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
		} else {
			/* default */
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
			/* low sh */
			if (rdev->flags & RADEON_IS_MOBILITY)
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
			else
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
			/* mid sh */
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
			/* high sh */
			idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
			/* low mh */
			if (rdev->flags & RADEON_IS_MOBILITY)
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1);
			else
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
			/* mid mh */
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
			/* high mh */
			idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
		}
	}
}

void r600_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
		/* 0xff01 is a flag rather than an actual voltage */
		if (voltage->voltage == 0xff01)
			return;
		if (voltage->voltage != rdev->pm.current_vddc) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG_DRIVER("Setting: v: %d\n", voltage->voltage);
		}
	}
}

bool r600_gui_idle(struct radeon_device *rdev)
{
	return !(RREG32(GRBM_STATUS) & GUI_ACTIVE);
}
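/* hpd for digital panel detect/disconnect */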
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	if (ASIC_IS_DCE3(rdev)) {
		switch (hpd) {
		case RADEON_HPD_1:
			if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_2:
			if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_3:
			if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_4:
			if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
			/* DCE 3.2 */
		case RADEON_HPD_5:
			if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_6:
			if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		default:
			break;
		}
	} else {
		switch (hpd) {
		case RADEON_HPD_1:
			if (RREG32(DC_HOT_PLUG_DETECT1_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_2:
			if (RREG32(DC_HOT_PLUG_DETECT2_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_3:
			if (RREG32(DC_HOT_PLUG_DETECT3_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		default:
			break;
		}
	}
	return connected;
}

void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = r600_hpd_sense(rdev, hpd);

	if (ASIC_IS_DCE3(rdev)) {
		switch (hpd) {
		case RADEON_HPD_1:
			tmp = RREG32(DC_HPD1_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD1_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			tmp = RREG32(DC_HPD2_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD2_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			tmp = RREG32(DC_HPD3_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD3_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_4:
			tmp = RREG32(DC_HPD4_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD4_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_5:
			tmp = RREG32(DC_HPD5_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD5_INT_CONTROL, tmp);
			break;
			/* DCE 3.2 */
		case RADEON_HPD_6:
			tmp = RREG32(DC_HPD6_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD6_INT_CONTROL, tmp);
			break;
		default:
			break;
		}
	} else {
		switch (hpd) {
		case RADEON_HPD_1:
			tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
			break;
		default:
			break;
		}
	}
}

void r600_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned enable = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);

		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
			/* don't try to enable hpd on eDP or LVDS avoid breaking the
			 * aux dp channel on imac and help (but not completely fix)
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
			 * also avoid interrupt storms during dpms.
			 */
			continue;
		}
		if (ASIC_IS_DCE3(rdev)) {
			u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) | DC_HPDx_RX_INT_TIMER(0xfa);
			if (ASIC_IS_DCE32(rdev))
				tmp |= DC_HPDx_EN;

			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HPD1_CONTROL, tmp);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HPD2_CONTROL, tmp);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HPD3_CONTROL, tmp);
				break;
			case RADEON_HPD_4:
				WREG32(DC_HPD4_CONTROL, tmp);
				break;
				/* DCE 3.2 */
			case RADEON_HPD_5:
				WREG32(DC_HPD5_CONTROL, tmp);
				break;
			case RADEON_HPD_6:
				WREG32(DC_HPD6_CONTROL, tmp);
				break;
			default:
				break;
			}
		} else {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HOT_PLUG_DETECT1_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HOT_PLUG_DETECT2_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HOT_PLUG_DETECT3_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			default:
				break;
			}
		}
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			enable |= 1 << radeon_connector->hpd.hpd;
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
	}
	radeon_irq_kms_enable_hpd(rdev, enable);
}

void r600_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned disable = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		if (ASIC_IS_DCE3(rdev)) {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HPD1_CONTROL, 0);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HPD2_CONTROL, 0);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HPD3_CONTROL, 0);
				break;
			case RADEON_HPD_4:
				WREG32(DC_HPD4_CONTROL, 0);
				break;
				/* DCE 3.2 */
			case RADEON_HPD_5:
				WREG32(DC_HPD5_CONTROL, 0);
				break;
			case RADEON_HPD_6:
				WREG32(DC_HPD6_CONTROL, 0);
				break;
			default:
				break;
			}
		} else {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HOT_PLUG_DETECT1_CONTROL, 0);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HOT_PLUG_DETECT2_CONTROL, 0);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HOT_PLUG_DETECT3_CONTROL, 0);
				break;
			default:
				break;
			}
		}
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			disable |= 1 << radeon_connector->hpd.hpd;
	}
	radeon_irq_kms_disable_hpd(rdev, disable);
}
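/*
 * R600 PCIE GART
 */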
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush hdp cache so updates hit vram */
	if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
	    !(rdev->flags & RADEON_IS_AGP)) {
		void __iomem *ptr = (void *)rdev->gart.ptr;

		/* r7xx hw bug.  write to HDP_DEBUG1 followed by fb read
		 * rather than write to HDP_REG_COHERENCY_FLUSH_CNTL.
		 * This seems to cause problems on some AGP cards.
		 * Just use the old method for them.
		 */
		WREG32(HDP_DEBUG1, 0);
		readl((void __iomem *)ptr);
	} else
		WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_INVALIDATION_LOW_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_INVALIDATION_HIGH_ADDR, (rdev->mc.gtt_end - 1) >> 12);
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			pr_warn("[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}

int r600_pcie_gart_init(struct radeon_device *rdev)
{
	int r;

	if (rdev->gart.robj) {
		WARN(1, "R600 PCIE GART already initialized\n");
		return 0;
	}
	/* Initialize common gart structure */
	r = radeon_gart_init(rdev);
	if (r)
		return r;
	rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;
	return radeon_gart_table_vram_alloc(rdev);
}

static int r600_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r, i;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
		ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp | ENABLE_L1_STRICT_ORDERING);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_UVD_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_UVD_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));
	for (i = 1; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	r600_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}

static void r600_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Disable all tables */
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	/* Disable L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup L1 TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
	      ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_UVD_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_UVD_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}

static void r600_pcie_gart_fini(struct radeon_device *rdev)
{
	radeon_gart_fini(rdev);
	r600_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
}

static void r600_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
		ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp | ENABLE_L1_STRICT_ORDERING);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

int r600_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(R_000E50_SRBM_STATUS) & 0x3F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg)
{
	unsigned long flags;
	uint32_t r;

	spin_lock_irqsave(&rdev->mc_idx_lock, flags);
	WREG32(R_0028F8_MC_INDEX, S_0028F8_MC_IND_ADDR(reg));
	r = RREG32(R_0028FC_MC_DATA);
	WREG32(R_0028F8_MC_INDEX, ~C_0028F8_MC_IND_ADDR);
	spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
	return r;
}

void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->mc_idx_lock, flags);
	WREG32(R_0028F8_MC_INDEX, S_0028F8_MC_IND_ADDR(reg) |
	       S_0028F8_MC_IND_WR_EN(1));
	WREG32(R_0028FC_MC_DATA, v);
	WREG32(R_0028F8_MC_INDEX, 0x7F);
	spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
}

static void r600_mc_program(struct radeon_device *rdev)
{
	struct rv515_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	rv515_mc_stop(rdev, &save);
	if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture (doesn't exist before R600) */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 22);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 22);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	rv515_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
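/**
 * r600_vram_gtt_location - try to find VRAM & GTT location
 * @rdev: radeon device structure holding all necessary information
 * @mc: memory controller structure holding memory information
 *
 * Try to place VRAM at the same offset in the GPU address space as it
 * has in the CPU (PCI) address space, as some GPUs have issues when the
 * two are reprogrammed to different ranges.
 *
 * If there is not enough space to fit the non-visible VRAM after the
 * aperture, limit the VRAM size to the aperture.
 *
 * If we are using AGP, place VRAM adjacent to the AGP aperture since both
 * need to live in the same aperture.
 *
 * Note: GTT start, end and size should be initialized before calling this
 * function on AGP platforms.
 */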
static void r600_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
{
	u64 size_bf, size_af;

	if (mc->mc_vram_size > 0xE0000000) {
		/* leave room for at least 512M GTT */
		dev_warn(rdev->dev, "limiting VRAM\n");
		mc->real_vram_size = 0xE0000000;
		mc->mc_vram_size = 0xE0000000;
	}
	if (rdev->flags & RADEON_IS_AGP) {
		size_bf = mc->gtt_start;
		size_af = mc->mc_mask - mc->gtt_end;
		if (size_bf > size_af) {
			if (mc->mc_vram_size > size_bf) {
				dev_warn(rdev->dev, "limiting VRAM\n");
				mc->real_vram_size = size_bf;
				mc->mc_vram_size = size_bf;
			}
			mc->vram_start = mc->gtt_start - mc->mc_vram_size;
		} else {
			if (mc->mc_vram_size > size_af) {
				dev_warn(rdev->dev, "limiting VRAM\n");
				mc->real_vram_size = size_af;
				mc->mc_vram_size = size_af;
			}
			mc->vram_start = mc->gtt_end + 1;
		}
		mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
		dev_info(rdev->dev, "VRAM: %lluM 0x%08llX - 0x%08llX (%lluM used)\n",
			 mc->mc_vram_size >> 20, mc->vram_start,
			 mc->vram_end, mc->real_vram_size >> 20);
	} else {
		u64 base = 0;
		if (rdev->flags & RADEON_IS_IGP) {
			base = RREG32(MC_VM_FB_LOCATION) & 0xFFFF;
			base <<= 24;
		}
		radeon_vram_location(rdev, &rdev->mc, base);
		rdev->mc.gtt_base_align = 0;
		radeon_gtt_location(rdev, mc);
	}
}

static int r600_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;
	uint32_t h_addr, l_addr;
	unsigned long long k8_addr;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r600_vram_gtt_location(rdev, &rdev->mc);

	if (rdev->flags & RADEON_IS_IGP) {
		rs690_pm_info(rdev);
		rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);

		if (rdev->family == CHIP_RS780 || rdev->family == CHIP_RS880) {
			/* Use K8 direct mapping for fast fb access. */
			rdev->fastfb_working = false;
			h_addr = G_000012_K8_ADDR_EXT(RREG32_MC(R_000012_MC_MISC_UMA_CNTL));
			l_addr = RREG32_MC(R_000011_K8_FB_LOCATION);
			k8_addr = ((unsigned long long)h_addr) << 32 | l_addr;
#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_PAE)
			if (k8_addr + rdev->mc.visible_vram_size < 0x100000000ULL)
#endif
			{
				/* FastFB shall be used with UMA memory. Here it is simply
				 * disabled when sideport memory is present.
				 */
				if (rdev->mc.igp_sideport_enabled == false && radeon_fastfb == 1) {
					DRM_INFO("Direct mapping: aper base at 0x%llx, replaced by direct mapping base 0x%llx.\n",
						 (unsigned long long)rdev->mc.aper_base, k8_addr);
					rdev->mc.aper_base = (resource_size_t)k8_addr;
					rdev->fastfb_working = true;
				}
			}
		}
	}

	radeon_update_bandwidth_info(rdev);
	return 0;
}

int r600_vram_scratch_init(struct radeon_device *rdev)
{
	int r;

	if (rdev->vram_scratch.robj == NULL) {
		r = radeon_bo_create(rdev, RADEON_GPU_PAGE_SIZE,
				     PAGE_SIZE, true, RADEON_GEM_DOMAIN_VRAM,
				     0, NULL, NULL, &rdev->vram_scratch.robj);
		if (r) {
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->vram_scratch.robj,
			  RADEON_GEM_DOMAIN_VRAM, &rdev->vram_scratch.gpu_addr);
	if (r) {
		radeon_bo_unreserve(rdev->vram_scratch.robj);
		return r;
	}
	r = radeon_bo_kmap(rdev->vram_scratch.robj,
			   (void **)&rdev->vram_scratch.ptr);
	if (r)
		radeon_bo_unpin(rdev->vram_scratch.robj);
	radeon_bo_unreserve(rdev->vram_scratch.robj);

	return r;
}

void r600_vram_scratch_fini(struct radeon_device *rdev)
{
	int r;

	if (rdev->vram_scratch.robj == NULL) {
		return;
	}
	r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
	if (likely(r == 0)) {
		radeon_bo_kunmap(rdev->vram_scratch.robj);
		radeon_bo_unpin(rdev->vram_scratch.robj);
		radeon_bo_unreserve(rdev->vram_scratch.robj);
	}
	radeon_bo_unref(&rdev->vram_scratch.robj);
}
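/* Flag in the VBIOS scratch register whether the GUI (3D) engine is hung,
 * so the state is visible across a GPU reset.
 */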
void r600_set_bios_scratch_engine_hung(struct radeon_device *rdev, bool hung)
{
	u32 tmp = RREG32(R600_BIOS_3_SCRATCH);

	if (hung)
		tmp |= ATOM_S3_ASIC_GUI_ENGINE_HUNG;
	else
		tmp &= ~ATOM_S3_ASIC_GUI_ENGINE_HUNG;

	WREG32(R600_BIOS_3_SCRATCH, tmp);
}

static void r600_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  R_008010_GRBM_STATUS      = 0x%08X\n",
		 RREG32(R_008010_GRBM_STATUS));
	dev_info(rdev->dev, "  R_008014_GRBM_STATUS2     = 0x%08X\n",
		 RREG32(R_008014_GRBM_STATUS2));
	dev_info(rdev->dev, "  R_000E50_SRBM_STATUS      = 0x%08X\n",
		 RREG32(R_000E50_SRBM_STATUS));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		 RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		 RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		 RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		 RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		 RREG32(DMA_STATUS_REG));
}
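/* A CRTC is considered hung if its H/V counter stops advancing while the
 * CRTC is enabled; the counters are sampled several times before a hang
 * is declared.
 */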
static bool r600_is_display_hung(struct radeon_device *rdev)
{
	u32 crtc_hung = 0;
	u32 crtc_status[2];
	u32 i, j, tmp;

	for (i = 0; i < rdev->num_crtc; i++) {
		if (RREG32(AVIVO_D1CRTC_CONTROL + crtc_offsets[i]) & AVIVO_CRTC_EN) {
			crtc_status[i] = RREG32(AVIVO_D1CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
			crtc_hung |= (1 << i);
		}
	}

	for (j = 0; j < 10; j++) {
		for (i = 0; i < rdev->num_crtc; i++) {
			if (crtc_hung & (1 << i)) {
				tmp = RREG32(AVIVO_D1CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
				if (tmp != crtc_status[i])
					crtc_hung &= ~(1 << i);
			}
		}
		if (crtc_hung == 0)
			return false;
		udelay(100);
	}

	return true;
}

u32 r600_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(R_008010_GRBM_STATUS);
	if (rdev->family >= CHIP_RV770) {
		if (G_008010_PA_BUSY(tmp) | G_008010_SC_BUSY(tmp) |
		    G_008010_SH_BUSY(tmp) | G_008010_SX_BUSY(tmp) |
		    G_008010_TA_BUSY(tmp) | G_008010_VGT_BUSY(tmp) |
		    G_008010_DB03_BUSY(tmp) | G_008010_CB03_BUSY(tmp) |
		    G_008010_SPI03_BUSY(tmp) | G_008010_VGT_BUSY_NO_DMA(tmp))
			reset_mask |= RADEON_RESET_GFX;
	} else {
		if (G_008010_PA_BUSY(tmp) | G_008010_SC_BUSY(tmp) |
		    G_008010_SH_BUSY(tmp) | G_008010_SX_BUSY(tmp) |
		    G_008010_TA03_BUSY(tmp) | G_008010_VGT_BUSY(tmp) |
		    G_008010_DB03_BUSY(tmp) | G_008010_CB03_BUSY(tmp) |
		    G_008010_SPI03_BUSY(tmp) | G_008010_VGT_BUSY_NO_DMA(tmp))
			reset_mask |= RADEON_RESET_GFX;
	}

	if (G_008010_CF_RQ_PENDING(tmp) | G_008010_PF_RQ_PENDING(tmp) |
	    G_008010_CP_BUSY(tmp) | G_008010_CP_COHERENCY_BUSY(tmp))
		reset_mask |= RADEON_RESET_CP;

	if (G_008010_GRBM_EE_BUSY(tmp))
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(R_000E50_SRBM_STATUS);
	if (G_000E50_RLC_RQ_PENDING(tmp) | G_000E50_RLC_BUSY(tmp))
		reset_mask |= RADEON_RESET_RLC;

	if (G_000E50_IH_BUSY(tmp))
		reset_mask |= RADEON_RESET_IH;

	if (G_000E50_SEM_BUSY(tmp))
		reset_mask |= RADEON_RESET_SEM;

	if (G_000E50_GRBM_RQ_PENDING(tmp))
		reset_mask |= RADEON_RESET_GRBM;

	if (G_000E50_VMC_BUSY(tmp))
		reset_mask |= RADEON_RESET_VMC;

	if (G_000E50_MCB_BUSY(tmp) | G_000E50_MCDZ_BUSY(tmp) |
	    G_000E50_MCDY_BUSY(tmp) | G_000E50_MCDX_BUSY(tmp) |
	    G_000E50_MCDW_BUSY(tmp))
		reset_mask |= RADEON_RESET_MC;

	if (r600_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* Skip MC reset as it's most likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}

static void r600_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct rv515_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	r600_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	if (rdev->family >= CHIP_RV770)
		WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1) | S_0086D8_CP_PFP_HALT(1));
	else
		WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));

	/* disable the RLC */
	WREG32(RLC_CNTL, 0);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	mdelay(50);

	rv515_mc_stop(rdev, &save);
	if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}

	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		if (rdev->family >= CHIP_RV770)
			grbm_soft_reset |= S_008020_SOFT_RESET_DB(1) |
				S_008020_SOFT_RESET_CB(1) |
				S_008020_SOFT_RESET_PA(1) |
				S_008020_SOFT_RESET_SC(1) |
				S_008020_SOFT_RESET_SPI(1) |
				S_008020_SOFT_RESET_SX(1) |
				S_008020_SOFT_RESET_SH(1) |
				S_008020_SOFT_RESET_TC(1) |
				S_008020_SOFT_RESET_TA(1) |
				S_008020_SOFT_RESET_VC(1) |
				S_008020_SOFT_RESET_VGT(1);
		else
			grbm_soft_reset |= S_008020_SOFT_RESET_CR(1) |
				S_008020_SOFT_RESET_DB(1) |
				S_008020_SOFT_RESET_CB(1) |
				S_008020_SOFT_RESET_PA(1) |
				S_008020_SOFT_RESET_SC(1) |
				S_008020_SOFT_RESET_SMX(1) |
				S_008020_SOFT_RESET_SPI(1) |
				S_008020_SOFT_RESET_SX(1) |
				S_008020_SOFT_RESET_SH(1) |
				S_008020_SOFT_RESET_TC(1) |
				S_008020_SOFT_RESET_TA(1) |
				S_008020_SOFT_RESET_VC(1) |
				S_008020_SOFT_RESET_VGT(1);
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= S_008020_SOFT_RESET_CP(1) |
			S_008020_SOFT_RESET_VGT(1);

		srbm_soft_reset |= S_000E60_SOFT_RESET_GRBM(1);
	}

	if (reset_mask & RADEON_RESET_DMA) {
		if (rdev->family >= CHIP_RV770)
			srbm_soft_reset |= RV770_SOFT_RESET_DMA;
		else
			srbm_soft_reset |= SOFT_RESET_DMA;
	}

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= S_000E60_SOFT_RESET_RLC(1);

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= S_000E60_SOFT_RESET_SEM(1);

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= S_000E60_SOFT_RESET_IH(1);

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= S_000E60_SOFT_RESET_GRBM(1);

	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= S_000E60_SOFT_RESET_MC(1);
	}

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= S_000E60_SOFT_RESET_VMC(1);

	if (grbm_soft_reset) {
		tmp = RREG32(R_008020_GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "R_008020_GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(R_008020_GRBM_SOFT_RESET, tmp);
		tmp = RREG32(R_008020_GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(R_008020_GRBM_SOFT_RESET, tmp);
		tmp = RREG32(R_008020_GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	mdelay(1);

	rv515_mc_resume(rdev, &save);
	udelay(50);

	r600_print_gpu_status_regs(rdev);
}
1819
1820static void r600_gpu_pci_config_reset(struct radeon_device *rdev)
1821{
1822 struct rv515_mc_save save;
1823 u32 tmp, i;
1824
1825 dev_info(rdev->dev, "GPU pci config reset\n");
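	/* Halt CP fetching/parsing (ME, plus PFP on RV770 and newer) */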
1830 if (rdev->family >= CHIP_RV770)
1831 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1) | S_0086D8_CP_PFP_HALT(1));
1832 else
1833 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
1834
1835
1836 WREG32(RLC_CNTL, 0);
1837
1838
1839 tmp = RREG32(DMA_RB_CNTL);
1840 tmp &= ~DMA_RB_ENABLE;
1841 WREG32(DMA_RB_CNTL, tmp);
1842
1843 mdelay(50);
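	/* Drop sclk/mclk to bypass mode ahead of the reset (RV770 and newer) */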
1846 if (rdev->family >= CHIP_RV770)
1847 rv770_set_clk_bypass_mode(rdev);
1848
1849 pci_clear_master(rdev->pdev);
1850
1851 rv515_mc_stop(rdev, &save);
1852 if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1854 }
1855
1856
1857 tmp = RREG32(BUS_CNTL);
1858 tmp |= VGA_COHE_SPEC_TIMER_DIS;
1859 WREG32(BUS_CNTL, tmp);
1860
1861 tmp = RREG32(BIF_SCRATCH0);
1862
1863
1864 radeon_pci_config_reset(rdev);
1865 mdelay(1);
1866
1867
1868 tmp = SOFT_RESET_BIF;
1869 WREG32(SRBM_SOFT_RESET, tmp);
1870 mdelay(1);
1871 WREG32(SRBM_SOFT_RESET, 0);
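	/* Wait for the ASIC to come back; CONFIG_MEMSIZE reads as all
	 * ones while the chip is still in reset.
	 */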
1874 for (i = 0; i < rdev->usec_timeout; i++) {
1875 if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
1876 break;
1877 udelay(1);
1878 }
1879}
1880
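/**
 * r600_asic_reset - reset the GPU
 *
 * @rdev: radeon_device pointer
 * @hard: go straight to a PCI config reset instead of a soft reset
 *
 * Check which blocks are hung and soft reset them. If the soft reset
 * fails to clear the hang and hard resets are allowed, fall back to a
 * PCI config reset. Returns 0.
 */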
1881int r600_asic_reset(struct radeon_device *rdev, bool hard)
1882{
1883 u32 reset_mask;
1884
1885 if (hard) {
1886 r600_gpu_pci_config_reset(rdev);
1887 return 0;
1888 }
1889
1890 reset_mask = r600_gpu_check_soft_reset(rdev);
1891
1892 if (reset_mask)
1893 r600_set_bios_scratch_engine_hung(rdev, true);
1894
1895
1896 r600_gpu_soft_reset(rdev, reset_mask);
1897
1898 reset_mask = r600_gpu_check_soft_reset(rdev);
1899
1900
1901 if (reset_mask && radeon_hard_reset)
1902 r600_gpu_pci_config_reset(rdev);
1903
1904 reset_mask = r600_gpu_check_soft_reset(rdev);
1905
1906 if (!reset_mask)
1907 r600_set_bios_scratch_engine_hung(rdev, false);
1908
1909 return 0;
1910}
1911
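/**
 * r600_gfx_is_lockup - Check if the GFX engine is locked up
 *
 * @rdev: radeon_device pointer
 * @ring: radeon_ring structure holding ring information
 *
 * Check if the GFX engine is locked up.
 * Returns true if the engine appears to be locked up, false if not.
 */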
1921bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
1922{
1923 u32 reset_mask = r600_gpu_check_soft_reset(rdev);
1924
1925 if (!(reset_mask & (RADEON_RESET_GFX |
1926 RADEON_RESET_COMPUTE |
1927 RADEON_RESET_CP))) {
1928 radeon_ring_lockup_update(rdev, ring);
1929 return false;
1930 }
1931 return radeon_ring_test_lockup(rdev, ring);
1932}
1933
1934u32 r6xx_remap_render_backend(struct radeon_device *rdev,
1935 u32 tiling_pipe_num,
1936 u32 max_rb_num,
1937 u32 total_max_rb_num,
1938 u32 disabled_rb_mask)
1939{
1940 u32 rendering_pipe_num, rb_num_width, req_rb_num;
1941 u32 pipe_rb_ratio, pipe_rb_remain, tmp;
1942 u32 data = 0, mask = 1 << (max_rb_num - 1);
1943 unsigned i, j;
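	/* Mask out the RBs that don't exist on this ASIC, but only when
	 * at least one RB would remain enabled.
	 */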
1946 tmp = disabled_rb_mask | ((0xff << max_rb_num) & 0xff);
1947
1948 if ((tmp & 0xff) != 0xff)
1949 disabled_rb_mask = tmp;
1950
1951 rendering_pipe_num = 1 << tiling_pipe_num;
1952 req_rb_num = total_max_rb_num - r600_count_pipe_bits(disabled_rb_mask);
1953 BUG_ON(rendering_pipe_num < req_rb_num);
1954
1955 pipe_rb_ratio = rendering_pipe_num / req_rb_num;
1956 pipe_rb_remain = rendering_pipe_num - pipe_rb_ratio * req_rb_num;
1957
	if (rdev->family <= CHIP_RV740) {
		/* r6xx/r7xx: 2 bits per backend in the backend map */
		rb_num_width = 2;
	} else {
		/* evergreen and newer: 4 bits per backend in the backend map */
		rb_num_width = 4;
	}
1965
1966 for (i = 0; i < max_rb_num; i++) {
1967 if (!(mask & disabled_rb_mask)) {
1968 for (j = 0; j < pipe_rb_ratio; j++) {
1969 data <<= rb_num_width;
1970 data |= max_rb_num - i - 1;
1971 }
1972 if (pipe_rb_remain) {
1973 data <<= rb_num_width;
1974 data |= max_rb_num - i - 1;
1975 pipe_rb_remain--;
1976 }
1977 }
1978 mask >>= 1;
1979 }
1980
1981 return data;
1982}
1983
1984int r600_count_pipe_bits(uint32_t val)
1985{
1986 return hweight32(val);
1987}
1988
1989static void r600_gpu_init(struct radeon_device *rdev)
1990{
1991 u32 tiling_config;
1992 u32 ramcfg;
1993 u32 cc_gc_shader_pipe_config;
1994 u32 tmp;
1995 int i, j;
1996 u32 sq_config;
1997 u32 sq_gpr_resource_mgmt_1 = 0;
1998 u32 sq_gpr_resource_mgmt_2 = 0;
1999 u32 sq_thread_resource_mgmt = 0;
2000 u32 sq_stack_resource_mgmt_1 = 0;
2001 u32 sq_stack_resource_mgmt_2 = 0;
2002 u32 disabled_rb_mask;
2003
2004 rdev->config.r600.tiling_group_size = 256;
2005 switch (rdev->family) {
2006 case CHIP_R600:
2007 rdev->config.r600.max_pipes = 4;
2008 rdev->config.r600.max_tile_pipes = 8;
2009 rdev->config.r600.max_simds = 4;
2010 rdev->config.r600.max_backends = 4;
2011 rdev->config.r600.max_gprs = 256;
2012 rdev->config.r600.max_threads = 192;
2013 rdev->config.r600.max_stack_entries = 256;
2014 rdev->config.r600.max_hw_contexts = 8;
2015 rdev->config.r600.max_gs_threads = 16;
2016 rdev->config.r600.sx_max_export_size = 128;
2017 rdev->config.r600.sx_max_export_pos_size = 16;
2018 rdev->config.r600.sx_max_export_smx_size = 128;
2019 rdev->config.r600.sq_num_cf_insts = 2;
2020 break;
2021 case CHIP_RV630:
2022 case CHIP_RV635:
2023 rdev->config.r600.max_pipes = 2;
2024 rdev->config.r600.max_tile_pipes = 2;
2025 rdev->config.r600.max_simds = 3;
2026 rdev->config.r600.max_backends = 1;
2027 rdev->config.r600.max_gprs = 128;
2028 rdev->config.r600.max_threads = 192;
2029 rdev->config.r600.max_stack_entries = 128;
2030 rdev->config.r600.max_hw_contexts = 8;
2031 rdev->config.r600.max_gs_threads = 4;
2032 rdev->config.r600.sx_max_export_size = 128;
2033 rdev->config.r600.sx_max_export_pos_size = 16;
2034 rdev->config.r600.sx_max_export_smx_size = 128;
2035 rdev->config.r600.sq_num_cf_insts = 2;
2036 break;
2037 case CHIP_RV610:
2038 case CHIP_RV620:
2039 case CHIP_RS780:
2040 case CHIP_RS880:
2041 rdev->config.r600.max_pipes = 1;
2042 rdev->config.r600.max_tile_pipes = 1;
2043 rdev->config.r600.max_simds = 2;
2044 rdev->config.r600.max_backends = 1;
2045 rdev->config.r600.max_gprs = 128;
2046 rdev->config.r600.max_threads = 192;
2047 rdev->config.r600.max_stack_entries = 128;
2048 rdev->config.r600.max_hw_contexts = 4;
2049 rdev->config.r600.max_gs_threads = 4;
2050 rdev->config.r600.sx_max_export_size = 128;
2051 rdev->config.r600.sx_max_export_pos_size = 16;
2052 rdev->config.r600.sx_max_export_smx_size = 128;
2053 rdev->config.r600.sq_num_cf_insts = 1;
2054 break;
2055 case CHIP_RV670:
2056 rdev->config.r600.max_pipes = 4;
2057 rdev->config.r600.max_tile_pipes = 4;
2058 rdev->config.r600.max_simds = 4;
2059 rdev->config.r600.max_backends = 4;
2060 rdev->config.r600.max_gprs = 192;
2061 rdev->config.r600.max_threads = 192;
2062 rdev->config.r600.max_stack_entries = 256;
2063 rdev->config.r600.max_hw_contexts = 8;
2064 rdev->config.r600.max_gs_threads = 16;
2065 rdev->config.r600.sx_max_export_size = 128;
2066 rdev->config.r600.sx_max_export_pos_size = 16;
2067 rdev->config.r600.sx_max_export_smx_size = 128;
2068 rdev->config.r600.sq_num_cf_insts = 2;
2069 break;
2070 default:
2071 break;
2072 }
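	/* Initialize HDP */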
2075 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2076 WREG32((0x2c14 + j), 0x00000000);
2077 WREG32((0x2c18 + j), 0x00000000);
2078 WREG32((0x2c1c + j), 0x00000000);
2079 WREG32((0x2c20 + j), 0x00000000);
2080 WREG32((0x2c24 + j), 0x00000000);
2081 }
2082
2083 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
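	/* Setup tiling */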
2086 tiling_config = 0;
2087 ramcfg = RREG32(RAMCFG);
2088 switch (rdev->config.r600.max_tile_pipes) {
2089 case 1:
2090 tiling_config |= PIPE_TILING(0);
2091 break;
2092 case 2:
2093 tiling_config |= PIPE_TILING(1);
2094 break;
2095 case 4:
2096 tiling_config |= PIPE_TILING(2);
2097 break;
2098 case 8:
2099 tiling_config |= PIPE_TILING(3);
2100 break;
2101 default:
2102 break;
2103 }
2104 rdev->config.r600.tiling_npipes = rdev->config.r600.max_tile_pipes;
2105 rdev->config.r600.tiling_nbanks = 4 << ((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
2106 tiling_config |= BANK_TILING((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
2107 tiling_config |= GROUP_SIZE((ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
2108
2109 tmp = (ramcfg & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
2110 if (tmp > 3) {
2111 tiling_config |= ROW_TILING(3);
2112 tiling_config |= SAMPLE_SPLIT(3);
2113 } else {
2114 tiling_config |= ROW_TILING(tmp);
2115 tiling_config |= SAMPLE_SPLIT(tmp);
2116 }
2117 tiling_config |= BANK_SWAPS(1);
2118
2119 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0x00ffff00;
2120 tmp = rdev->config.r600.max_simds -
2121 r600_count_pipe_bits((cc_gc_shader_pipe_config >> 16) & R6XX_MAX_SIMDS_MASK);
2122 rdev->config.r600.active_simds = tmp;
2123
2124 disabled_rb_mask = (RREG32(CC_RB_BACKEND_DISABLE) >> 16) & R6XX_MAX_BACKENDS_MASK;
2125 tmp = 0;
2126 for (i = 0; i < rdev->config.r600.max_backends; i++)
2127 tmp |= (1 << i);
2128
2129 if ((disabled_rb_mask & tmp) == tmp) {
2130 for (i = 0; i < rdev->config.r600.max_backends; i++)
2131 disabled_rb_mask &= ~(1 << i);
2132 }
2133 tmp = (tiling_config & PIPE_TILING__MASK) >> PIPE_TILING__SHIFT;
2134 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.r600.max_backends,
2135 R6XX_MAX_BACKENDS, disabled_rb_mask);
2136 tiling_config |= tmp << 16;
2137 rdev->config.r600.backend_map = tmp;
2138
2139 rdev->config.r600.tile_config = tiling_config;
2140 WREG32(GB_TILING_CONFIG, tiling_config);
2141 WREG32(DCP_TILING_CONFIG, tiling_config & 0xffff);
2142 WREG32(HDP_TILING_CONFIG, tiling_config & 0xffff);
2143 WREG32(DMA_TILING_CONFIG, tiling_config & 0xffff);
2144
2145 tmp = R6XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK) >> 8);
2146 WREG32(VGT_OUT_DEALLOC_CNTL, (tmp * 4) & DEALLOC_DIST_MASK);
2147 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((tmp * 4) - 2) & VTX_REUSE_DEPTH_MASK);
2148
2149
2150 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) | ROQ_IB2_START(0x2b)));
2151 WREG32(CP_MEQ_THRESHOLDS, (MEQ_END(0x40) | ROQ_END(0x40)));
2152
2153 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO | SYNC_GRADIENT |
2154 SYNC_WALKER | SYNC_ALIGNER));
2155
2156 if (rdev->family == CHIP_RV670)
2157 WREG32(ARB_GDEC_RD_CNTL, 0x00000021);
2158
2159 tmp = RREG32(SX_DEBUG_1);
2160 tmp |= SMX_EVENT_RELEASE;
	if (rdev->family > CHIP_R600)
2162 tmp |= ENABLE_NEW_SMX_ADDRESS;
2163 WREG32(SX_DEBUG_1, tmp);
2164
2165 if (((rdev->family) == CHIP_R600) ||
2166 ((rdev->family) == CHIP_RV630) ||
2167 ((rdev->family) == CHIP_RV610) ||
2168 ((rdev->family) == CHIP_RV620) ||
2169 ((rdev->family) == CHIP_RS780) ||
2170 ((rdev->family) == CHIP_RS880)) {
2171 WREG32(DB_DEBUG, PREZ_MUST_WAIT_FOR_POSTZ_DONE);
2172 } else {
2173 WREG32(DB_DEBUG, 0);
2174 }
2175 WREG32(DB_WATERMARKS, (DEPTH_FREE(4) | DEPTH_CACHELINE_FREE(16) |
2176 DEPTH_FLUSH(16) | DEPTH_PENDING_FREE(4)));
2177
2178 WREG32(PA_SC_MULTI_CHIP_CNTL, 0);
2179 WREG32(VGT_NUM_INSTANCES, 0);
2180
2181 WREG32(SPI_CONFIG_CNTL, GPR_WRITE_PRIORITY(0));
2182 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(0));
2183
2184 tmp = RREG32(SQ_MS_FIFO_SIZES);
2185 if (((rdev->family) == CHIP_RV610) ||
2186 ((rdev->family) == CHIP_RV620) ||
2187 ((rdev->family) == CHIP_RS780) ||
2188 ((rdev->family) == CHIP_RS880)) {
2189 tmp = (CACHE_FIFO_SIZE(0xa) |
2190 FETCH_FIFO_HIWATER(0xa) |
2191 DONE_FIFO_HIWATER(0xe0) |
2192 ALU_UPDATE_FIFO_HIWATER(0x8));
2193 } else if (((rdev->family) == CHIP_R600) ||
2194 ((rdev->family) == CHIP_RV630)) {
2195 tmp &= ~DONE_FIFO_HIWATER(0xff);
2196 tmp |= DONE_FIFO_HIWATER(0x4);
2197 }
2198 WREG32(SQ_MS_FIFO_SIZES, tmp);
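	/* SQ_CONFIG, SQ_GPR_RESOURCE_MGMT, SQ_THREAD_RESOURCE_MGMT and
	 * SQ_STACK_RESOURCE_MGMT only get default values here; the 2D/3D
	 * drivers adjust them as needed.
	 */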
2203 sq_config = RREG32(SQ_CONFIG);
2204 sq_config &= ~(PS_PRIO(3) |
2205 VS_PRIO(3) |
2206 GS_PRIO(3) |
2207 ES_PRIO(3));
2208 sq_config |= (DX9_CONSTS |
2209 VC_ENABLE |
2210 PS_PRIO(0) |
2211 VS_PRIO(1) |
2212 GS_PRIO(2) |
2213 ES_PRIO(3));
2214
2215 if ((rdev->family) == CHIP_R600) {
2216 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(124) |
2217 NUM_VS_GPRS(124) |
2218 NUM_CLAUSE_TEMP_GPRS(4));
2219 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(0) |
2220 NUM_ES_GPRS(0));
2221 sq_thread_resource_mgmt = (NUM_PS_THREADS(136) |
2222 NUM_VS_THREADS(48) |
2223 NUM_GS_THREADS(4) |
2224 NUM_ES_THREADS(4));
2225 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(128) |
2226 NUM_VS_STACK_ENTRIES(128));
2227 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(0) |
2228 NUM_ES_STACK_ENTRIES(0));
2229 } else if (((rdev->family) == CHIP_RV610) ||
2230 ((rdev->family) == CHIP_RV620) ||
2231 ((rdev->family) == CHIP_RS780) ||
2232 ((rdev->family) == CHIP_RS880)) {
2233
2234 sq_config &= ~VC_ENABLE;
2235
2236 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
2237 NUM_VS_GPRS(44) |
2238 NUM_CLAUSE_TEMP_GPRS(2));
2239 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(17) |
2240 NUM_ES_GPRS(17));
2241 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
2242 NUM_VS_THREADS(78) |
2243 NUM_GS_THREADS(4) |
2244 NUM_ES_THREADS(31));
2245 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(40) |
2246 NUM_VS_STACK_ENTRIES(40));
2247 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(32) |
2248 NUM_ES_STACK_ENTRIES(16));
2249 } else if (((rdev->family) == CHIP_RV630) ||
2250 ((rdev->family) == CHIP_RV635)) {
2251 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
2252 NUM_VS_GPRS(44) |
2253 NUM_CLAUSE_TEMP_GPRS(2));
2254 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(18) |
2255 NUM_ES_GPRS(18));
2256 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
2257 NUM_VS_THREADS(78) |
2258 NUM_GS_THREADS(4) |
2259 NUM_ES_THREADS(31));
2260 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(40) |
2261 NUM_VS_STACK_ENTRIES(40));
2262 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(32) |
2263 NUM_ES_STACK_ENTRIES(16));
2264 } else if ((rdev->family) == CHIP_RV670) {
2265 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
2266 NUM_VS_GPRS(44) |
2267 NUM_CLAUSE_TEMP_GPRS(2));
2268 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(17) |
2269 NUM_ES_GPRS(17));
2270 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
2271 NUM_VS_THREADS(78) |
2272 NUM_GS_THREADS(4) |
2273 NUM_ES_THREADS(31));
2274 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(64) |
2275 NUM_VS_STACK_ENTRIES(64));
2276 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(64) |
2277 NUM_ES_STACK_ENTRIES(64));
2278 }
2279
2280 WREG32(SQ_CONFIG, sq_config);
2281 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
2282 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
2283 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
2284 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
2285 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
2286
2287 if (((rdev->family) == CHIP_RV610) ||
2288 ((rdev->family) == CHIP_RV620) ||
2289 ((rdev->family) == CHIP_RS780) ||
2290 ((rdev->family) == CHIP_RS880)) {
2291 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(TC_ONLY));
2292 } else {
2293 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(VC_AND_TC));
2294 }
2295
2296
2297 WREG32(PA_SC_AA_SAMPLE_LOCS_2S, (S0_X(0xc) | S0_Y(0x4) |
2298 S1_X(0x4) | S1_Y(0xc)));
2299 WREG32(PA_SC_AA_SAMPLE_LOCS_4S, (S0_X(0xe) | S0_Y(0xe) |
2300 S1_X(0x2) | S1_Y(0x2) |
2301 S2_X(0xa) | S2_Y(0x6) |
2302 S3_X(0x6) | S3_Y(0xa)));
2303 WREG32(PA_SC_AA_SAMPLE_LOCS_8S_WD0, (S0_X(0xe) | S0_Y(0xb) |
2304 S1_X(0x4) | S1_Y(0xc) |
2305 S2_X(0x1) | S2_Y(0x6) |
2306 S3_X(0xa) | S3_Y(0xe)));
2307 WREG32(PA_SC_AA_SAMPLE_LOCS_8S_WD1, (S4_X(0x6) | S4_Y(0x1) |
2308 S5_X(0x0) | S5_Y(0x0) |
2309 S6_X(0xb) | S6_Y(0x4) |
2310 S7_X(0x7) | S7_Y(0x8)));
2311
2312 WREG32(VGT_STRMOUT_EN, 0);
2313 tmp = rdev->config.r600.max_pipes * 16;
2314 switch (rdev->family) {
2315 case CHIP_RV610:
2316 case CHIP_RV620:
2317 case CHIP_RS780:
2318 case CHIP_RS880:
2319 tmp += 32;
2320 break;
2321 case CHIP_RV670:
2322 tmp += 128;
2323 break;
2324 default:
2325 break;
2326 }
2327 if (tmp > 256) {
2328 tmp = 256;
2329 }
2330 WREG32(VGT_ES_PER_GS, 128);
2331 WREG32(VGT_GS_PER_ES, tmp);
2332 WREG32(VGT_GS_PER_VS, 2);
2333 WREG32(VGT_GS_VERTEX_REUSE, 16);
2334
2335
2336 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
2337 WREG32(VGT_STRMOUT_EN, 0);
2338 WREG32(SX_MISC, 0);
2339 WREG32(PA_SC_MODE_CNTL, 0);
2340 WREG32(PA_SC_AA_CONFIG, 0);
2341 WREG32(PA_SC_LINE_STIPPLE, 0);
2342 WREG32(SPI_INPUT_Z, 0);
2343 WREG32(SPI_PS_IN_CONTROL_0, NUM_INTERP(2));
2344 WREG32(CB_COLOR7_FRAG, 0);
2345
2346
2347 WREG32(CB_COLOR0_BASE, 0);
2348 WREG32(CB_COLOR1_BASE, 0);
2349 WREG32(CB_COLOR2_BASE, 0);
2350 WREG32(CB_COLOR3_BASE, 0);
2351 WREG32(CB_COLOR4_BASE, 0);
2352 WREG32(CB_COLOR5_BASE, 0);
2353 WREG32(CB_COLOR6_BASE, 0);
2354 WREG32(CB_COLOR7_BASE, 0);
2355 WREG32(CB_COLOR7_FRAG, 0);
2356
2357 switch (rdev->family) {
2358 case CHIP_RV610:
2359 case CHIP_RV620:
2360 case CHIP_RS780:
2361 case CHIP_RS880:
2362 tmp = TC_L2_SIZE(8);
2363 break;
2364 case CHIP_RV630:
2365 case CHIP_RV635:
2366 tmp = TC_L2_SIZE(4);
2367 break;
2368 case CHIP_R600:
2369 tmp = TC_L2_SIZE(0) | L2_DISABLE_LATE_HIT;
2370 break;
2371 default:
2372 tmp = TC_L2_SIZE(0);
2373 break;
2374 }
2375 WREG32(TC_CNTL, tmp);
2376
2377 tmp = RREG32(HDP_HOST_PATH_CNTL);
2378 WREG32(HDP_HOST_PATH_CNTL, tmp);
2379
2380 tmp = RREG32(ARB_POP);
2381 tmp |= ENABLE_TC128;
2382 WREG32(ARB_POP, tmp);
2383
2384 WREG32(PA_SC_MULTI_CHIP_CNTL, 0);
2385 WREG32(PA_CL_ENHANCE, (CLIP_VTX_REORDER_ENA |
2386 NUM_CLIP_SEQ(3)));
2387 WREG32(PA_SC_ENHANCE, FORCE_EOV_MAX_CLK_CNT(4095));
2388 WREG32(VC_ENHANCE, 0);
2389}
2390
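/*
 * Indirect registers accessor
 */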
2395u32 r600_pciep_rreg(struct radeon_device *rdev, u32 reg)
2396{
2397 unsigned long flags;
2398 u32 r;
2399
2400 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
2401 WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
2402 (void)RREG32(PCIE_PORT_INDEX);
2403 r = RREG32(PCIE_PORT_DATA);
2404 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
2405 return r;
2406}
2407
2408void r600_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
2409{
2410 unsigned long flags;
2411
2412 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
2413 WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
2414 (void)RREG32(PCIE_PORT_INDEX);
2415 WREG32(PCIE_PORT_DATA, (v));
2416 (void)RREG32(PCIE_PORT_DATA);
2417 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
2418}
2419
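/*
 * CP & Ring
 */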
2423void r600_cp_stop(struct radeon_device *rdev)
2424{
2425 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
2426 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
2427 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
2428 WREG32(SCRATCH_UMSK, 0);
2429 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
2430}
2431
2432int r600_init_microcode(struct radeon_device *rdev)
2433{
2434 const char *chip_name;
2435 const char *rlc_chip_name;
2436 const char *smc_chip_name = "RV770";
2437 size_t pfp_req_size, me_req_size, rlc_req_size, smc_req_size = 0;
2438 char fw_name[30];
2439 int err;
2440
2441 DRM_DEBUG("\n");
2442
2443 switch (rdev->family) {
2444 case CHIP_R600:
2445 chip_name = "R600";
2446 rlc_chip_name = "R600";
2447 break;
2448 case CHIP_RV610:
2449 chip_name = "RV610";
2450 rlc_chip_name = "R600";
2451 break;
2452 case CHIP_RV630:
2453 chip_name = "RV630";
2454 rlc_chip_name = "R600";
2455 break;
2456 case CHIP_RV620:
2457 chip_name = "RV620";
2458 rlc_chip_name = "R600";
2459 break;
2460 case CHIP_RV635:
2461 chip_name = "RV635";
2462 rlc_chip_name = "R600";
2463 break;
2464 case CHIP_RV670:
2465 chip_name = "RV670";
2466 rlc_chip_name = "R600";
2467 break;
2468 case CHIP_RS780:
2469 case CHIP_RS880:
2470 chip_name = "RS780";
2471 rlc_chip_name = "R600";
2472 break;
2473 case CHIP_RV770:
2474 chip_name = "RV770";
2475 rlc_chip_name = "R700";
2476 smc_chip_name = "RV770";
2477 smc_req_size = ALIGN(RV770_SMC_UCODE_SIZE, 4);
2478 break;
2479 case CHIP_RV730:
2480 chip_name = "RV730";
2481 rlc_chip_name = "R700";
2482 smc_chip_name = "RV730";
2483 smc_req_size = ALIGN(RV730_SMC_UCODE_SIZE, 4);
2484 break;
2485 case CHIP_RV710:
2486 chip_name = "RV710";
2487 rlc_chip_name = "R700";
2488 smc_chip_name = "RV710";
2489 smc_req_size = ALIGN(RV710_SMC_UCODE_SIZE, 4);
2490 break;
2491 case CHIP_RV740:
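		/* RV740 reuses the RV730 CP microcode; only its SMC image is separate */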
2492 chip_name = "RV730";
2493 rlc_chip_name = "R700";
2494 smc_chip_name = "RV740";
2495 smc_req_size = ALIGN(RV740_SMC_UCODE_SIZE, 4);
2496 break;
2497 case CHIP_CEDAR:
2498 chip_name = "CEDAR";
2499 rlc_chip_name = "CEDAR";
2500 smc_chip_name = "CEDAR";
2501 smc_req_size = ALIGN(CEDAR_SMC_UCODE_SIZE, 4);
2502 break;
2503 case CHIP_REDWOOD:
2504 chip_name = "REDWOOD";
2505 rlc_chip_name = "REDWOOD";
2506 smc_chip_name = "REDWOOD";
2507 smc_req_size = ALIGN(REDWOOD_SMC_UCODE_SIZE, 4);
2508 break;
2509 case CHIP_JUNIPER:
2510 chip_name = "JUNIPER";
2511 rlc_chip_name = "JUNIPER";
2512 smc_chip_name = "JUNIPER";
2513 smc_req_size = ALIGN(JUNIPER_SMC_UCODE_SIZE, 4);
2514 break;
2515 case CHIP_CYPRESS:
2516 case CHIP_HEMLOCK:
2517 chip_name = "CYPRESS";
2518 rlc_chip_name = "CYPRESS";
2519 smc_chip_name = "CYPRESS";
2520 smc_req_size = ALIGN(CYPRESS_SMC_UCODE_SIZE, 4);
2521 break;
2522 case CHIP_PALM:
2523 chip_name = "PALM";
2524 rlc_chip_name = "SUMO";
2525 break;
2526 case CHIP_SUMO:
2527 chip_name = "SUMO";
2528 rlc_chip_name = "SUMO";
2529 break;
2530 case CHIP_SUMO2:
2531 chip_name = "SUMO2";
2532 rlc_chip_name = "SUMO";
2533 break;
2534 default: BUG();
2535 }
2536
2537 if (rdev->family >= CHIP_CEDAR) {
2538 pfp_req_size = EVERGREEN_PFP_UCODE_SIZE * 4;
2539 me_req_size = EVERGREEN_PM4_UCODE_SIZE * 4;
2540 rlc_req_size = EVERGREEN_RLC_UCODE_SIZE * 4;
2541 } else if (rdev->family >= CHIP_RV770) {
2542 pfp_req_size = R700_PFP_UCODE_SIZE * 4;
2543 me_req_size = R700_PM4_UCODE_SIZE * 4;
2544 rlc_req_size = R700_RLC_UCODE_SIZE * 4;
2545 } else {
2546 pfp_req_size = R600_PFP_UCODE_SIZE * 4;
2547 me_req_size = R600_PM4_UCODE_SIZE * 12;
2548 rlc_req_size = R600_RLC_UCODE_SIZE * 4;
2549 }
2550
2551 DRM_INFO("Loading %s Microcode\n", chip_name);
2552
2553 snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
2554 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
2555 if (err)
2556 goto out;
2557 if (rdev->pfp_fw->size != pfp_req_size) {
2558 pr_err("r600_cp: Bogus length %zu in firmware \"%s\"\n",
2559 rdev->pfp_fw->size, fw_name);
2560 err = -EINVAL;
2561 goto out;
2562 }
2563
2564 snprintf(fw_name, sizeof(fw_name), "radeon/%s_me.bin", chip_name);
2565 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
2566 if (err)
2567 goto out;
2568 if (rdev->me_fw->size != me_req_size) {
2569 pr_err("r600_cp: Bogus length %zu in firmware \"%s\"\n",
2570 rdev->me_fw->size, fw_name);
2571 err = -EINVAL;
2572 goto out;
2573 }
2574
2575 snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
2576 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
2577 if (err)
2578 goto out;
2579 if (rdev->rlc_fw->size != rlc_req_size) {
2580 pr_err("r600_rlc: Bogus length %zu in firmware \"%s\"\n",
2581 rdev->rlc_fw->size, fw_name);
2582 err = -EINVAL;
2583 goto out;
2584 }
2585
2586 if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_HEMLOCK)) {
2587 snprintf(fw_name, sizeof(fw_name), "radeon/%s_smc.bin", smc_chip_name);
2588 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
2589 if (err) {
2590 pr_err("smc: error loading firmware \"%s\"\n", fw_name);
2591 release_firmware(rdev->smc_fw);
2592 rdev->smc_fw = NULL;
2593 err = 0;
2594 } else if (rdev->smc_fw->size != smc_req_size) {
2595 pr_err("smc: Bogus length %zu in firmware \"%s\"\n",
2596 rdev->smc_fw->size, fw_name);
2597 err = -EINVAL;
2598 }
2599 }
2600
2601out:
2602 if (err) {
2603 if (err != -EINVAL)
2604 pr_err("r600_cp: Failed to load firmware \"%s\"\n",
2605 fw_name);
2606 release_firmware(rdev->pfp_fw);
2607 rdev->pfp_fw = NULL;
2608 release_firmware(rdev->me_fw);
2609 rdev->me_fw = NULL;
2610 release_firmware(rdev->rlc_fw);
2611 rdev->rlc_fw = NULL;
2612 release_firmware(rdev->smc_fw);
2613 rdev->smc_fw = NULL;
2614 }
2615 return err;
2616}
2617
2618u32 r600_gfx_get_rptr(struct radeon_device *rdev,
2619 struct radeon_ring *ring)
2620{
2621 u32 rptr;
2622
2623 if (rdev->wb.enabled)
2624 rptr = rdev->wb.wb[ring->rptr_offs/4];
2625 else
2626 rptr = RREG32(R600_CP_RB_RPTR);
2627
2628 return rptr;
2629}
2630
2631u32 r600_gfx_get_wptr(struct radeon_device *rdev,
2632 struct radeon_ring *ring)
2633{
2634 return RREG32(R600_CP_RB_WPTR);
2635}
2636
2637void r600_gfx_set_wptr(struct radeon_device *rdev,
2638 struct radeon_ring *ring)
2639{
2640 WREG32(R600_CP_RB_WPTR, ring->wptr);
2641 (void)RREG32(R600_CP_RB_WPTR);
2642}
2643
2644static int r600_cp_load_microcode(struct radeon_device *rdev)
2645{
2646 const __be32 *fw_data;
2647 int i;
2648
2649 if (!rdev->me_fw || !rdev->pfp_fw)
2650 return -EINVAL;
2651
2652 r600_cp_stop(rdev);
2653
2654 WREG32(CP_RB_CNTL,
2655#ifdef __BIG_ENDIAN
2656 BUF_SWAP_32BIT |
2657#endif
2658 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2659
2660
2661 WREG32(GRBM_SOFT_RESET, SOFT_RESET_CP);
2662 RREG32(GRBM_SOFT_RESET);
2663 mdelay(15);
2664 WREG32(GRBM_SOFT_RESET, 0);
2665
2666 WREG32(CP_ME_RAM_WADDR, 0);
2667
2668 fw_data = (const __be32 *)rdev->me_fw->data;
2669 WREG32(CP_ME_RAM_WADDR, 0);
2670 for (i = 0; i < R600_PM4_UCODE_SIZE * 3; i++)
2671 WREG32(CP_ME_RAM_DATA,
2672 be32_to_cpup(fw_data++));
2673
2674 fw_data = (const __be32 *)rdev->pfp_fw->data;
2675 WREG32(CP_PFP_UCODE_ADDR, 0);
2676 for (i = 0; i < R600_PFP_UCODE_SIZE; i++)
2677 WREG32(CP_PFP_UCODE_DATA,
2678 be32_to_cpup(fw_data++));
2679
2680 WREG32(CP_PFP_UCODE_ADDR, 0);
2681 WREG32(CP_ME_RAM_WADDR, 0);
2682 WREG32(CP_ME_RAM_RADDR, 0);
2683 return 0;
2684}
2685
2686int r600_cp_start(struct radeon_device *rdev)
2687{
2688 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2689 int r;
2690 uint32_t cp_me;
2691
2692 r = radeon_ring_lock(rdev, ring, 7);
2693 if (r) {
2694 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2695 return r;
2696 }
2697 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2698 radeon_ring_write(ring, 0x1);
2699 if (rdev->family >= CHIP_RV770) {
2700 radeon_ring_write(ring, 0x0);
2701 radeon_ring_write(ring, rdev->config.rv770.max_hw_contexts - 1);
2702 } else {
2703 radeon_ring_write(ring, 0x3);
2704 radeon_ring_write(ring, rdev->config.r600.max_hw_contexts - 1);
2705 }
2706 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2707 radeon_ring_write(ring, 0);
2708 radeon_ring_write(ring, 0);
2709 radeon_ring_unlock_commit(rdev, ring, false);
2710
2711 cp_me = 0xff;
2712 WREG32(R_0086D8_CP_ME_CNTL, cp_me);
2713 return 0;
2714}
2715
2716int r600_cp_resume(struct radeon_device *rdev)
2717{
2718 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2719 u32 tmp;
2720 u32 rb_bufsz;
2721 int r;
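	/* Reset the CP */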
2724 WREG32(GRBM_SOFT_RESET, SOFT_RESET_CP);
2725 RREG32(GRBM_SOFT_RESET);
2726 mdelay(15);
2727 WREG32(GRBM_SOFT_RESET, 0);
2728
2729
2730 rb_bufsz = order_base_2(ring->ring_size / 8);
2731 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2732#ifdef __BIG_ENDIAN
2733 tmp |= BUF_SWAP_32BIT;
2734#endif
2735 WREG32(CP_RB_CNTL, tmp);
2736 WREG32(CP_SEM_WAIT_TIMER, 0x0);
2737
2738
2739 WREG32(CP_RB_WPTR_DELAY, 0);
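	/* Initialize the ring buffer's read and write pointers */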
2742 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2743 WREG32(CP_RB_RPTR_WR, 0);
2744 ring->wptr = 0;
2745 WREG32(CP_RB_WPTR, ring->wptr);
2746
2747
2748 WREG32(CP_RB_RPTR_ADDR,
2749 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2750 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2751 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2752
2753 if (rdev->wb.enabled)
2754 WREG32(SCRATCH_UMSK, 0xff);
2755 else {
2756 tmp |= RB_NO_UPDATE;
2757 WREG32(SCRATCH_UMSK, 0);
2758 }
2759
2760 mdelay(1);
2761 WREG32(CP_RB_CNTL, tmp);
2762
2763 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2764 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2765
2766 r600_cp_start(rdev);
2767 ring->ready = true;
2768 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2769 if (r) {
2770 ring->ready = false;
2771 return r;
2772 }
2773
2774 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
2775 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
2776
2777 return 0;
2778}
2779
2780void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size)
2781{
2782 u32 rb_bufsz;
2783 int r;
2784
2785
2786 rb_bufsz = order_base_2(ring_size / 8);
2787 ring_size = (1 << (rb_bufsz + 1)) * 4;
2788 ring->ring_size = ring_size;
2789 ring->align_mask = 16 - 1;
2790
2791 if (radeon_ring_supports_scratch_reg(rdev, ring)) {
2792 r = radeon_scratch_get(rdev, &ring->rptr_save_reg);
2793 if (r) {
2794 DRM_ERROR("failed to get scratch reg for rptr save (%d).\n", r);
2795 ring->rptr_save_reg = 0;
2796 }
2797 }
2798}
2799
2800void r600_cp_fini(struct radeon_device *rdev)
2801{
2802 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2803 r600_cp_stop(rdev);
2804 radeon_ring_fini(rdev, ring);
2805 radeon_scratch_free(rdev, ring->rptr_save_reg);
2806}
2807
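/*
 * GPU scratch register helpers
 */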
2811void r600_scratch_init(struct radeon_device *rdev)
2812{
2813 int i;
2814
2815 rdev->scratch.num_reg = 7;
2816 rdev->scratch.reg_base = SCRATCH_REG0;
2817 for (i = 0; i < rdev->scratch.num_reg; i++) {
2818 rdev->scratch.free[i] = true;
2819 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
2820 }
2821}
2822
2823int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
2824{
2825 uint32_t scratch;
2826 uint32_t tmp = 0;
2827 unsigned i;
2828 int r;
2829
2830 r = radeon_scratch_get(rdev, &scratch);
2831 if (r) {
2832 DRM_ERROR("radeon: cp failed to get scratch reg (%d).\n", r);
2833 return r;
2834 }
2835 WREG32(scratch, 0xCAFEDEAD);
2836 r = radeon_ring_lock(rdev, ring, 3);
2837 if (r) {
2838 DRM_ERROR("radeon: cp failed to lock ring %d (%d).\n", ring->idx, r);
2839 radeon_scratch_free(rdev, scratch);
2840 return r;
2841 }
2842 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2843 radeon_ring_write(ring, ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
2844 radeon_ring_write(ring, 0xDEADBEEF);
2845 radeon_ring_unlock_commit(rdev, ring, false);
2846 for (i = 0; i < rdev->usec_timeout; i++) {
2847 tmp = RREG32(scratch);
2848 if (tmp == 0xDEADBEEF)
2849 break;
2850 udelay(1);
2851 }
2852 if (i < rdev->usec_timeout) {
2853 DRM_INFO("ring test on %d succeeded in %d usecs\n", ring->idx, i);
2854 } else {
2855 DRM_ERROR("radeon: ring %d test failed (scratch(0x%04X)=0x%08X)\n",
2856 ring->idx, scratch, tmp);
2857 r = -EINVAL;
2858 }
2859 radeon_scratch_free(rdev, scratch);
2860 return r;
2861}
2862
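/*
 * CP fences/semaphores
 */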
2867void r600_fence_ring_emit(struct radeon_device *rdev,
2868 struct radeon_fence *fence)
2869{
2870 struct radeon_ring *ring = &rdev->ring[fence->ring];
2871 u32 cp_coher_cntl = PACKET3_TC_ACTION_ENA | PACKET3_VC_ACTION_ENA |
2872 PACKET3_SH_ACTION_ENA;
2873
2874 if (rdev->family >= CHIP_RV770)
2875 cp_coher_cntl |= PACKET3_FULL_CACHE_ENA;
2876
2877 if (rdev->wb.use_event) {
2878 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
2879
2880 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
2881 radeon_ring_write(ring, cp_coher_cntl);
2882 radeon_ring_write(ring, 0xFFFFFFFF);
2883 radeon_ring_write(ring, 0);
2884 radeon_ring_write(ring, 10);
2885
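		/* EVENT_WRITE_EOP - flush caches, send int */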
2886 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
2887 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT_TS) | EVENT_INDEX(5));
2888 radeon_ring_write(ring, lower_32_bits(addr));
2889 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
2890 radeon_ring_write(ring, fence->seq);
2891 radeon_ring_write(ring, 0);
2892 } else {
2893
2894 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
2895 radeon_ring_write(ring, cp_coher_cntl);
2896 radeon_ring_write(ring, 0xFFFFFFFF);
2897 radeon_ring_write(ring, 0);
2898 radeon_ring_write(ring, 10);
2899 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
2900 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT) | EVENT_INDEX(0));
2901
2902 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2903 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
2904 radeon_ring_write(ring, WAIT_3D_IDLE_bit | WAIT_3D_IDLECLEAN_bit);
2905
2906 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2907 radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
2908 radeon_ring_write(ring, fence->seq);
2909
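		/* CP_INTERRUPT packet 3 no longer exists, use packet 0 */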
2910 radeon_ring_write(ring, PACKET0(CP_INT_STATUS, 0));
2911 radeon_ring_write(ring, RB_INT_STAT);
2912 }
2913}
2914
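/**
 * r600_semaphore_ring_emit - emit a semaphore on the CP ring
 *
 * @rdev: radeon_device pointer
 * @ring: radeon ring buffer object
 * @semaphore: radeon semaphore object
 * @emit_wait: Is this a semaphore wait?
 *
 * Emits a semaphore signal/wait packet to the CP ring and prevents the PFP
 * from running ahead of the semaphore wait (r6xx-cayman).
 */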
2926bool r600_semaphore_ring_emit(struct radeon_device *rdev,
2927 struct radeon_ring *ring,
2928 struct radeon_semaphore *semaphore,
2929 bool emit_wait)
2930{
2931 uint64_t addr = semaphore->gpu_addr;
2932 unsigned sel = emit_wait ? PACKET3_SEM_SEL_WAIT : PACKET3_SEM_SEL_SIGNAL;
2933
2934 if (rdev->family < CHIP_CAYMAN)
2935 sel |= PACKET3_SEM_WAIT_ON_SIGNAL;
2936
2937 radeon_ring_write(ring, PACKET3(PACKET3_MEM_SEMAPHORE, 1));
2938 radeon_ring_write(ring, lower_32_bits(addr));
2939 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | sel);
2940
2941
2942 if (emit_wait && (rdev->family >= CHIP_CEDAR)) {
2943
2944 radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
2945 radeon_ring_write(ring, 0x0);
2946 }
2947
2948 return true;
2949}
2950
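/**
 * r600_copy_cpdma - copy pages using the CP DMA engine
 *
 * @rdev: radeon_device pointer
 * @src_offset: src GPU address
 * @dst_offset: dst GPU address
 * @num_gpu_pages: number of GPU pages to transfer
 * @resv: reservation object to sync with
 *
 * Copy GPU paging using the CP DMA engine (r6xx+). Used by the radeon
 * ttm implementation to move pages if registered as the asic copy
 * callback.
 */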
2964struct radeon_fence *r600_copy_cpdma(struct radeon_device *rdev,
2965 uint64_t src_offset, uint64_t dst_offset,
2966 unsigned num_gpu_pages,
2967 struct dma_resv *resv)
2968{
2969 struct radeon_fence *fence;
2970 struct radeon_sync sync;
2971 int ring_index = rdev->asic->copy.blit_ring_index;
2972 struct radeon_ring *ring = &rdev->ring[ring_index];
2973 u32 size_in_bytes, cur_size_in_bytes, tmp;
2974 int i, num_loops;
2975 int r = 0;
2976
2977 radeon_sync_create(&sync);
2978
2979 size_in_bytes = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT);
2980 num_loops = DIV_ROUND_UP(size_in_bytes, 0x1fffff);
2981 r = radeon_ring_lock(rdev, ring, num_loops * 6 + 24);
2982 if (r) {
2983 DRM_ERROR("radeon: moving bo (%d).\n", r);
2984 radeon_sync_free(rdev, &sync, NULL);
2985 return ERR_PTR(r);
2986 }
2987
2988 radeon_sync_resv(rdev, &sync, resv, false);
2989 radeon_sync_rings(rdev, &sync, ring->idx);
2990
2991 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2992 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
2993 radeon_ring_write(ring, WAIT_3D_IDLE_bit);
2994 for (i = 0; i < num_loops; i++) {
2995 cur_size_in_bytes = size_in_bytes;
2996 if (cur_size_in_bytes > 0x1fffff)
2997 cur_size_in_bytes = 0x1fffff;
2998 size_in_bytes -= cur_size_in_bytes;
2999 tmp = upper_32_bits(src_offset) & 0xff;
3000 if (size_in_bytes == 0)
3001 tmp |= PACKET3_CP_DMA_CP_SYNC;
3002 radeon_ring_write(ring, PACKET3(PACKET3_CP_DMA, 4));
3003 radeon_ring_write(ring, lower_32_bits(src_offset));
3004 radeon_ring_write(ring, tmp);
3005 radeon_ring_write(ring, lower_32_bits(dst_offset));
3006 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
3007 radeon_ring_write(ring, cur_size_in_bytes);
3008 src_offset += cur_size_in_bytes;
3009 dst_offset += cur_size_in_bytes;
3010 }
3011 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3012 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
3013 radeon_ring_write(ring, WAIT_CP_DMA_IDLE_bit);
3014
3015 r = radeon_fence_emit(rdev, &fence, ring->idx);
3016 if (r) {
3017 radeon_ring_unlock_undo(rdev, ring);
3018 radeon_sync_free(rdev, &sync, NULL);
3019 return ERR_PTR(r);
3020 }
3021
3022 radeon_ring_unlock_commit(rdev, ring, false);
3023 radeon_sync_free(rdev, &sync, fence);
3024
3025 return fence;
3026}
3027
3028int r600_set_surface_reg(struct radeon_device *rdev, int reg,
3029 uint32_t tiling_flags, uint32_t pitch,
3030 uint32_t offset, uint32_t obj_size)
3031{
3032
3033 return 0;
3034}
3035
3036void r600_clear_surface_reg(struct radeon_device *rdev, int reg)
3037{
3038
3039}
3040
3041static void r600_uvd_init(struct radeon_device *rdev)
3042{
3043 int r;
3044
3045 if (!rdev->has_uvd)
3046 return;
3047
3048 r = radeon_uvd_init(rdev);
3049 if (r) {
3050 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
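		/*
		 * UVD init failed, so clear has_uvd: the later start and
		 * resume paths check it and will be skipped instead of
		 * failing again.
		 */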
3057 rdev->has_uvd = false;
3058 return;
3059 }
3060 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
3061 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
3062}
3063
3064static void r600_uvd_start(struct radeon_device *rdev)
3065{
3066 int r;
3067
3068 if (!rdev->has_uvd)
3069 return;
3070
3071 r = uvd_v1_0_resume(rdev);
3072 if (r) {
3073 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
3074 goto error;
3075 }
3076 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
3077 if (r) {
3078 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
3079 goto error;
3080 }
3081 return;
3082
3083error:
3084 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
3085}
3086
3087static void r600_uvd_resume(struct radeon_device *rdev)
3088{
3089 struct radeon_ring *ring;
3090 int r;
3091
3092 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
3093 return;
3094
3095 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
3096 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
3097 if (r) {
3098 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
3099 return;
3100 }
3101 r = uvd_v1_0_init(rdev);
3102 if (r) {
3103 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
3104 return;
3105 }
3106}
3107
3108static int r600_startup(struct radeon_device *rdev)
3109{
3110 struct radeon_ring *ring;
3111 int r;
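	/* enable pcie gen2 link */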
3114 r600_pcie_gen2_enable(rdev);
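	/* scratch needs to be initialized before the mc */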
3117 r = r600_vram_scratch_init(rdev);
3118 if (r)
3119 return r;
3120
3121 r600_mc_program(rdev);
3122
3123 if (rdev->flags & RADEON_IS_AGP) {
3124 r600_agp_enable(rdev);
3125 } else {
3126 r = r600_pcie_gart_enable(rdev);
3127 if (r)
3128 return r;
3129 }
3130 r600_gpu_init(rdev);
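	/* allocate wb buffer */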
3133 r = radeon_wb_init(rdev);
3134 if (r)
3135 return r;
3136
3137 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
3138 if (r) {
3139 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
3140 return r;
3141 }
3142
3143 r600_uvd_start(rdev);
3144
3145
3146 if (!rdev->irq.installed) {
3147 r = radeon_irq_kms_init(rdev);
3148 if (r)
3149 return r;
3150 }
3151
3152 r = r600_irq_init(rdev);
3153 if (r) {
3154 DRM_ERROR("radeon: IH init failed (%d).\n", r);
3155 radeon_irq_kms_fini(rdev);
3156 return r;
3157 }
3158 r600_irq_set(rdev);
3159
3160 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3161 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
3162 RADEON_CP_PACKET2);
3163 if (r)
3164 return r;
3165
3166 r = r600_cp_load_microcode(rdev);
3167 if (r)
3168 return r;
3169 r = r600_cp_resume(rdev);
3170 if (r)
3171 return r;
3172
3173 r600_uvd_resume(rdev);
3174
3175 r = radeon_ib_pool_init(rdev);
3176 if (r) {
3177 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
3178 return r;
3179 }
3180
3181 r = radeon_audio_init(rdev);
3182 if (r) {
3183 DRM_ERROR("radeon: audio init failed\n");
3184 return r;
3185 }
3186
3187 return 0;
3188}
3189
3190void r600_vga_set_state(struct radeon_device *rdev, bool state)
3191{
3192 uint32_t temp;
3193
3194 temp = RREG32(CONFIG_CNTL);
3195 if (!state) {
3196 temp &= ~(1<<0);
3197 temp |= (1<<1);
3198 } else {
3199 temp &= ~(1<<1);
3200 }
3201 WREG32(CONFIG_CNTL, temp);
3202}
3203
3204int r600_resume(struct radeon_device *rdev)
3205{
3206 int r;
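	/* Post the card instead of resetting it: on r600, unlike r500,
	 * posting does the work needed to bring the GPU back into a good
	 * state.
	 */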
3213 atom_asic_init(rdev->mode_info.atom_context);
3214
3215 if (rdev->pm.pm_method == PM_METHOD_DPM)
3216 radeon_pm_resume(rdev);
3217
3218 rdev->accel_working = true;
3219 r = r600_startup(rdev);
3220 if (r) {
3221 DRM_ERROR("r600 startup failed on resume\n");
3222 rdev->accel_working = false;
3223 return r;
3224 }
3225
3226 return r;
3227}
3228
3229int r600_suspend(struct radeon_device *rdev)
3230{
3231 radeon_pm_suspend(rdev);
3232 radeon_audio_fini(rdev);
3233 r600_cp_stop(rdev);
3234 if (rdev->has_uvd) {
3235 uvd_v1_0_fini(rdev);
3236 radeon_uvd_suspend(rdev);
3237 }
3238 r600_irq_suspend(rdev);
3239 radeon_wb_disable(rdev);
3240 r600_pcie_gart_disable(rdev);
3241
3242 return 0;
3243}
3244
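/*
 * Device init & fini
 */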
3251int r600_init(struct radeon_device *rdev)
3252{
3253 int r;
3254
3255 r600_debugfs_mc_info_init(rdev);
3256
3257 if (!radeon_get_bios(rdev)) {
3258 if (ASIC_IS_AVIVO(rdev))
3259 return -EINVAL;
3260 }
3261
3262 if (!rdev->is_atom_bios) {
3263 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
3264 return -EINVAL;
3265 }
3266 r = radeon_atombios_init(rdev);
3267 if (r)
3268 return r;
3269
3270 if (!radeon_card_posted(rdev)) {
3271 if (!rdev->bios) {
3272 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
3273 return -EINVAL;
3274 }
3275 DRM_INFO("GPU not posted. posting now...\n");
3276 atom_asic_init(rdev->mode_info.atom_context);
3277 }
3278
3279 r600_scratch_init(rdev);
3280
3281 radeon_surface_init(rdev);
3282
3283 radeon_get_clock_info(rdev->ddev);
3284
3285 radeon_fence_driver_init(rdev);
3286 if (rdev->flags & RADEON_IS_AGP) {
3287 r = radeon_agp_init(rdev);
3288 if (r)
3289 radeon_agp_disable(rdev);
3290 }
3291 r = r600_mc_init(rdev);
3292 if (r)
3293 return r;
3294
3295 r = radeon_bo_init(rdev);
3296 if (r)
3297 return r;
3298
3299 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
3300 r = r600_init_microcode(rdev);
3301 if (r) {
3302 DRM_ERROR("Failed to load firmware!\n");
3303 return r;
3304 }
3305 }
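	/* Initialize power management */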
3308 radeon_pm_init(rdev);
3309
3310 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
3311 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
3312
3313 r600_uvd_init(rdev);
3314
3315 rdev->ih.ring_obj = NULL;
3316 r600_ih_ring_init(rdev, 64 * 1024);
3317
3318 r = r600_pcie_gart_init(rdev);
3319 if (r)
3320 return r;
3321
3322 rdev->accel_working = true;
3323 r = r600_startup(rdev);
3324 if (r) {
3325 dev_err(rdev->dev, "disabling GPU acceleration\n");
3326 r600_cp_fini(rdev);
3327 r600_irq_fini(rdev);
3328 radeon_wb_fini(rdev);
3329 radeon_ib_pool_fini(rdev);
3330 radeon_irq_kms_fini(rdev);
3331 r600_pcie_gart_fini(rdev);
3332 rdev->accel_working = false;
3333 }
3334
3335 return 0;
3336}
3337
3338void r600_fini(struct radeon_device *rdev)
3339{
3340 radeon_pm_fini(rdev);
3341 radeon_audio_fini(rdev);
3342 r600_cp_fini(rdev);
3343 r600_irq_fini(rdev);
3344 if (rdev->has_uvd) {
3345 uvd_v1_0_fini(rdev);
3346 radeon_uvd_fini(rdev);
3347 }
3348 radeon_wb_fini(rdev);
3349 radeon_ib_pool_fini(rdev);
3350 radeon_irq_kms_fini(rdev);
3351 r600_pcie_gart_fini(rdev);
3352 r600_vram_scratch_fini(rdev);
3353 radeon_agp_fini(rdev);
3354 radeon_gem_fini(rdev);
3355 radeon_fence_driver_fini(rdev);
3356 radeon_bo_fini(rdev);
3357 radeon_atombios_fini(rdev);
3358 kfree(rdev->bios);
3359 rdev->bios = NULL;
3360}
3361
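/*
 * CS stuff
 */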
3366void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3367{
3368 struct radeon_ring *ring = &rdev->ring[ib->ring];
3369 u32 next_rptr;
3370
3371 if (ring->rptr_save_reg) {
3372 next_rptr = ring->wptr + 3 + 4;
3373 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3374 radeon_ring_write(ring, ((ring->rptr_save_reg -
3375 PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
3376 radeon_ring_write(ring, next_rptr);
3377 } else if (rdev->wb.enabled) {
3378 next_rptr = ring->wptr + 5 + 4;
3379 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3380 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3381 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3382 radeon_ring_write(ring, next_rptr);
3383 radeon_ring_write(ring, 0);
3384 }
3385
3386 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3387 radeon_ring_write(ring,
3388#ifdef __BIG_ENDIAN
3389 (2 << 0) |
3390#endif
3391 (ib->gpu_addr & 0xFFFFFFFC));
3392 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3393 radeon_ring_write(ring, ib->length_dw);
3394}
3395
3396int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
3397{
3398 struct radeon_ib ib;
3399 uint32_t scratch;
3400 uint32_t tmp = 0;
3401 unsigned i;
3402 int r;
3403
3404 r = radeon_scratch_get(rdev, &scratch);
3405 if (r) {
3406 DRM_ERROR("radeon: failed to get scratch reg (%d).\n", r);
3407 return r;
3408 }
3409 WREG32(scratch, 0xCAFEDEAD);
3410 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
3411 if (r) {
3412 DRM_ERROR("radeon: failed to get ib (%d).\n", r);
3413 goto free_scratch;
3414 }
3415 ib.ptr[0] = PACKET3(PACKET3_SET_CONFIG_REG, 1);
3416 ib.ptr[1] = ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
3417 ib.ptr[2] = 0xDEADBEEF;
3418 ib.length_dw = 3;
3419 r = radeon_ib_schedule(rdev, &ib, NULL, false);
3420 if (r) {
3421 DRM_ERROR("radeon: failed to schedule ib (%d).\n", r);
3422 goto free_ib;
3423 }
3424 r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(
3425 RADEON_USEC_IB_TEST_TIMEOUT));
3426 if (r < 0) {
3427 DRM_ERROR("radeon: fence wait failed (%d).\n", r);
3428 goto free_ib;
3429 } else if (r == 0) {
3430 DRM_ERROR("radeon: fence wait timed out.\n");
3431 r = -ETIMEDOUT;
3432 goto free_ib;
3433 }
3434 r = 0;
3435 for (i = 0; i < rdev->usec_timeout; i++) {
3436 tmp = RREG32(scratch);
3437 if (tmp == 0xDEADBEEF)
3438 break;
3439 udelay(1);
3440 }
3441 if (i < rdev->usec_timeout) {
3442 DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
3443 } else {
3444 DRM_ERROR("radeon: ib test failed (scratch(0x%04X)=0x%08X)\n",
3445 scratch, tmp);
3446 r = -EINVAL;
3447 }
3448free_ib:
3449 radeon_ib_free(rdev, &ib);
3450free_scratch:
3451 radeon_scratch_free(rdev, scratch);
3452 return r;
3453}
3454
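/*
 * Interrupts
 *
 * Interrupts use a ring buffer on r6xx/r7xx hardware. It works much
 * like the CP ring buffer, but in reverse: the GPU writes to the ring
 * and the host consumes. The host's rptr points to where the host is
 * reading, and the wptr set by the GPU points to where it just wrote.
 */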
3466void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size)
3467{
3468 u32 rb_bufsz;
3469
3470
3471 rb_bufsz = order_base_2(ring_size / 4);
3472 ring_size = (1 << rb_bufsz) * 4;
3473 rdev->ih.ring_size = ring_size;
3474 rdev->ih.ptr_mask = rdev->ih.ring_size - 1;
3475 rdev->ih.rptr = 0;
3476}
3477
3478int r600_ih_ring_alloc(struct radeon_device *rdev)
3479{
3480 int r;
3481
3482
3483 if (rdev->ih.ring_obj == NULL) {
3484 r = radeon_bo_create(rdev, rdev->ih.ring_size,
3485 PAGE_SIZE, true,
3486 RADEON_GEM_DOMAIN_GTT, 0,
3487 NULL, NULL, &rdev->ih.ring_obj);
3488 if (r) {
3489 DRM_ERROR("radeon: failed to create ih ring buffer (%d).\n", r);
3490 return r;
3491 }
3492 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
3493 if (unlikely(r != 0))
3494 return r;
3495 r = radeon_bo_pin(rdev->ih.ring_obj,
3496 RADEON_GEM_DOMAIN_GTT,
3497 &rdev->ih.gpu_addr);
3498 if (r) {
3499 radeon_bo_unreserve(rdev->ih.ring_obj);
3500 DRM_ERROR("radeon: failed to pin ih ring buffer (%d).\n", r);
3501 return r;
3502 }
3503 r = radeon_bo_kmap(rdev->ih.ring_obj,
3504 (void **)&rdev->ih.ring);
3505 radeon_bo_unreserve(rdev->ih.ring_obj);
3506 if (r) {
3507 DRM_ERROR("radeon: failed to map ih ring buffer (%d).\n", r);
3508 return r;
3509 }
3510 }
3511 return 0;
3512}
3513
3514void r600_ih_ring_fini(struct radeon_device *rdev)
3515{
3516 int r;
3517 if (rdev->ih.ring_obj) {
3518 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
3519 if (likely(r == 0)) {
3520 radeon_bo_kunmap(rdev->ih.ring_obj);
3521 radeon_bo_unpin(rdev->ih.ring_obj);
3522 radeon_bo_unreserve(rdev->ih.ring_obj);
3523 }
3524 radeon_bo_unref(&rdev->ih.ring_obj);
3525 rdev->ih.ring = NULL;
3526 rdev->ih.ring_obj = NULL;
3527 }
3528}
3529
3530void r600_rlc_stop(struct radeon_device *rdev)
3531{
3533 if ((rdev->family >= CHIP_RV770) &&
3534 (rdev->family <= CHIP_RV740)) {
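		/* r7xx asics need to soft reset the RLC before halting it */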
3536 WREG32(SRBM_SOFT_RESET, SOFT_RESET_RLC);
3537 RREG32(SRBM_SOFT_RESET);
3538 mdelay(15);
3539 WREG32(SRBM_SOFT_RESET, 0);
3540 RREG32(SRBM_SOFT_RESET);
3541 }
3542
3543 WREG32(RLC_CNTL, 0);
3544}
3545
3546static void r600_rlc_start(struct radeon_device *rdev)
3547{
3548 WREG32(RLC_CNTL, RLC_ENABLE);
3549}
3550
3551static int r600_rlc_resume(struct radeon_device *rdev)
3552{
3553 u32 i;
3554 const __be32 *fw_data;
3555
3556 if (!rdev->rlc_fw)
3557 return -EINVAL;
3558
3559 r600_rlc_stop(rdev);
3560
3561 WREG32(RLC_HB_CNTL, 0);
3562
3563 WREG32(RLC_HB_BASE, 0);
3564 WREG32(RLC_HB_RPTR, 0);
3565 WREG32(RLC_HB_WPTR, 0);
3566 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
3567 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
3568 WREG32(RLC_MC_CNTL, 0);
3569 WREG32(RLC_UCODE_CNTL, 0);
3570
3571 fw_data = (const __be32 *)rdev->rlc_fw->data;
3572 if (rdev->family >= CHIP_RV770) {
3573 for (i = 0; i < R700_RLC_UCODE_SIZE; i++) {
3574 WREG32(RLC_UCODE_ADDR, i);
3575 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
3576 }
3577 } else {
3578 for (i = 0; i < R600_RLC_UCODE_SIZE; i++) {
3579 WREG32(RLC_UCODE_ADDR, i);
3580 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
3581 }
3582 }
3583 WREG32(RLC_UCODE_ADDR, 0);
3584
3585 r600_rlc_start(rdev);
3586
3587 return 0;
3588}
3589
3590static void r600_enable_interrupts(struct radeon_device *rdev)
3591{
3592 u32 ih_cntl = RREG32(IH_CNTL);
3593 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
3594
3595 ih_cntl |= ENABLE_INTR;
3596 ih_rb_cntl |= IH_RB_ENABLE;
3597 WREG32(IH_CNTL, ih_cntl);
3598 WREG32(IH_RB_CNTL, ih_rb_cntl);
3599 rdev->ih.enabled = true;
3600}
3601
3602void r600_disable_interrupts(struct radeon_device *rdev)
3603{
3604 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
3605 u32 ih_cntl = RREG32(IH_CNTL);
3606
3607 ih_rb_cntl &= ~IH_RB_ENABLE;
3608 ih_cntl &= ~ENABLE_INTR;
3609 WREG32(IH_RB_CNTL, ih_rb_cntl);
3610 WREG32(IH_CNTL, ih_cntl);
3611
3612 WREG32(IH_RB_RPTR, 0);
3613 WREG32(IH_RB_WPTR, 0);
3614 rdev->ih.enabled = false;
3615 rdev->ih.rptr = 0;
3616}
3617
3618static void r600_disable_interrupt_state(struct radeon_device *rdev)
3619{
3620 u32 tmp;
3621
3622 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3623 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3624 WREG32(DMA_CNTL, tmp);
3625 WREG32(GRBM_INT_CNTL, 0);
3626 WREG32(DxMODE_INT_MASK, 0);
3627 WREG32(D1GRPH_INTERRUPT_CONTROL, 0);
3628 WREG32(D2GRPH_INTERRUPT_CONTROL, 0);
3629 if (ASIC_IS_DCE3(rdev)) {
3630 WREG32(DCE3_DACA_AUTODETECT_INT_CONTROL, 0);
3631 WREG32(DCE3_DACB_AUTODETECT_INT_CONTROL, 0);
3632 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3633 WREG32(DC_HPD1_INT_CONTROL, tmp);
3634 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3635 WREG32(DC_HPD2_INT_CONTROL, tmp);
3636 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3637 WREG32(DC_HPD3_INT_CONTROL, tmp);
3638 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3639 WREG32(DC_HPD4_INT_CONTROL, tmp);
3640 if (ASIC_IS_DCE32(rdev)) {
3641 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3642 WREG32(DC_HPD5_INT_CONTROL, tmp);
3643 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3644 WREG32(DC_HPD6_INT_CONTROL, tmp);
3645 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3646 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, tmp);
3647 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3648 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, tmp);
3649 } else {
3650 tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3651 WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
3652 tmp = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3653 WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, tmp);
3654 }
3655 } else {
3656 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
3657 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
3658 tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3659 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
3660 tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3661 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
3662 tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3663 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
3664 tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3665 WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
3666 tmp = RREG32(HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3667 WREG32(HDMI1_AUDIO_PACKET_CONTROL, tmp);
3668 }
3669}
3670
3671int r600_irq_init(struct radeon_device *rdev)
3672{
3673 int ret = 0;
3674 int rb_bufsz;
3675 u32 interrupt_cntl, ih_cntl, ih_rb_cntl;
3676
3677
3678 ret = r600_ih_ring_alloc(rdev);
3679 if (ret)
3680 return ret;
3681
3682
3683 r600_disable_interrupts(rdev);
3684
3685
3686 if (rdev->family >= CHIP_CEDAR)
3687 ret = evergreen_rlc_resume(rdev);
3688 else
3689 ret = r600_rlc_resume(rdev);
3690 if (ret) {
3691 r600_ih_ring_fini(rdev);
3692 return ret;
3693 }
3694
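	/* Setup interrupt control; dummy reads go to the dummy page */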
3697 WREG32(INTERRUPT_CNTL2, rdev->dummy_page.addr >> 8);
3698 interrupt_cntl = RREG32(INTERRUPT_CNTL);
3699
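	/* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi */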
3702 interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
3703
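	/* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */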
3704 interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
3705 WREG32(INTERRUPT_CNTL, interrupt_cntl);
3706
3707 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
3708 rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
3709
3710 ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
3711 IH_WPTR_OVERFLOW_CLEAR |
3712 (rb_bufsz << 1));
3713
3714 if (rdev->wb.enabled)
3715 ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;
3716
3717
3718 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
3719 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
3720
3721 WREG32(IH_RB_CNTL, ih_rb_cntl);
3722
3723
3724 WREG32(IH_RB_RPTR, 0);
3725 WREG32(IH_RB_WPTR, 0);
3726
3727
3728 ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10);
3729
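	/* RPTR_REARM only works if msi's are enabled */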
3730 if (rdev->msi_enabled)
3731 ih_cntl |= RPTR_REARM;
3732 WREG32(IH_CNTL, ih_cntl);
3733
3734
3735 if (rdev->family >= CHIP_CEDAR)
3736 evergreen_disable_interrupt_state(rdev);
3737 else
3738 r600_disable_interrupt_state(rdev);
3739
3740
3741 pci_set_master(rdev->pdev);
3742
3743
3744 r600_enable_interrupts(rdev);
3745
3746 return ret;
3747}
3748
3749void r600_irq_suspend(struct radeon_device *rdev)
3750{
3751 r600_irq_disable(rdev);
3752 r600_rlc_stop(rdev);
3753}
3754
3755void r600_irq_fini(struct radeon_device *rdev)
3756{
3757 r600_irq_suspend(rdev);
3758 r600_ih_ring_fini(rdev);
3759}
3760
3761int r600_irq_set(struct radeon_device *rdev)
3762{
3763 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
3764 u32 mode_int = 0;
3765 u32 hpd1, hpd2, hpd3, hpd4 = 0, hpd5 = 0, hpd6 = 0;
3766 u32 grbm_int_cntl = 0;
3767 u32 hdmi0, hdmi1;
3768 u32 dma_cntl;
3769 u32 thermal_int = 0;
3770
3771 if (!rdev->irq.installed) {
3772 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
3773 return -EINVAL;
3774 }
3775
3776 if (!rdev->ih.enabled) {
3777 r600_disable_interrupts(rdev);
3778
		r600_disable_interrupt_state(rdev);
		return 0;
	}

	if (ASIC_IS_DCE3(rdev)) {
		hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
		if (ASIC_IS_DCE32(rdev)) {
			hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
			hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
			hdmi0 = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
			hdmi1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
		} else {
			hdmi0 = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
			hdmi1 = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
		}
	} else {
		hpd1 = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd2 = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd3 = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hdmi0 = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
		hdmi1 = RREG32(HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
	}

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) {
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	} else if (rdev->family >= CHIP_RV770) {
		thermal_int = RREG32(RV770_CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	}
	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int\n");
		cp_int_cntl |= RB_INT_ENABLE;
		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("r600_irq_set: vblank 0\n");
		mode_int |= D1MODE_VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("r600_irq_set: vblank 1\n");
		mode_int |= D2MODE_VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("r600_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("r600_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("r600_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("r600_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("r600_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("r600_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("r600_irq_set: hdmi 0\n");
		hdmi0 |= HDMI0_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("r600_irq_set: hdmi 1\n");
		hdmi1 |= HDMI0_AZ_FORMAT_WTRIG_MASK;
	}

	WREG32(CP_INT_CNTL, cp_int_cntl);
	WREG32(DMA_CNTL, dma_cntl);
	WREG32(DxMODE_INT_MASK, mode_int);
	WREG32(D1GRPH_INTERRUPT_CONTROL, DxGRPH_PFLIP_INT_MASK);
	WREG32(D2GRPH_INTERRUPT_CONTROL, DxGRPH_PFLIP_INT_MASK);
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
	if (ASIC_IS_DCE3(rdev)) {
		WREG32(DC_HPD1_INT_CONTROL, hpd1);
		WREG32(DC_HPD2_INT_CONTROL, hpd2);
		WREG32(DC_HPD3_INT_CONTROL, hpd3);
		WREG32(DC_HPD4_INT_CONTROL, hpd4);
		if (ASIC_IS_DCE32(rdev)) {
			WREG32(DC_HPD5_INT_CONTROL, hpd5);
			WREG32(DC_HPD6_INT_CONTROL, hpd6);
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, hdmi0);
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, hdmi1);
		} else {
			WREG32(HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
			WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, hdmi1);
		}
	} else {
		WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, hpd1);
		WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, hpd2);
		WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, hpd3);
		WREG32(HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
		WREG32(HDMI1_AUDIO_PACKET_CONTROL, hdmi1);
	}
	if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) {
		WREG32(CG_THERMAL_INT, thermal_int);
	} else if (rdev->family >= CHIP_RV770) {
		WREG32(RV770_CG_THERMAL_INT, thermal_int);
	}

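	/* posting read */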
	RREG32(R_000E50_SRBM_STATUS);

	return 0;
}

static void r600_irq_ack(struct radeon_device *rdev)
{
	u32 tmp;

	if (ASIC_IS_DCE3(rdev)) {
		rdev->irq.stat_regs.r600.disp_int = RREG32(DCE3_DISP_INTERRUPT_STATUS);
		rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE);
		rdev->irq.stat_regs.r600.disp_int_cont2 = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE2);
		if (ASIC_IS_DCE32(rdev)) {
			rdev->irq.stat_regs.r600.hdmi0_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET0);
			rdev->irq.stat_regs.r600.hdmi1_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET1);
		} else {
			rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
			rdev->irq.stat_regs.r600.hdmi1_status = RREG32(DCE3_HDMI1_STATUS);
		}
	} else {
		rdev->irq.stat_regs.r600.disp_int = RREG32(DISP_INTERRUPT_STATUS);
		rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
		rdev->irq.stat_regs.r600.disp_int_cont2 = 0;
		rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
		rdev->irq.stat_regs.r600.hdmi1_status = RREG32(HDMI1_STATUS);
	}
	rdev->irq.stat_regs.r600.d1grph_int = RREG32(D1GRPH_INTERRUPT_STATUS);
	rdev->irq.stat_regs.r600.d2grph_int = RREG32(D2GRPH_INTERRUPT_STATUS);

	if (rdev->irq.stat_regs.r600.d1grph_int & DxGRPH_PFLIP_INT_OCCURRED)
		WREG32(D1GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.r600.d2grph_int & DxGRPH_PFLIP_INT_OCCURRED)
		WREG32(D2GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(D1MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(D1MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT)
		WREG32(D2MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT)
		WREG32(D2MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD1_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD1_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD2_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD2_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD3_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD3_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (ASIC_IS_DCE32(rdev)) {
		if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) {
			tmp = RREG32(DC_HPD5_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD5_INT_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) {
			tmp = RREG32(DC_HPD6_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD6_INT_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi0_status & AFMT_AZ_FORMAT_WTRIG) {
			tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0);
			tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi1_status & AFMT_AZ_FORMAT_WTRIG) {
			tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1);
			tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, tmp);
		}
	} else {
		if (rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG) {
			tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL);
			tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
			WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG) {
			if (ASIC_IS_DCE3(rdev)) {
				tmp = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL);
				tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
				WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, tmp);
			} else {
				tmp = RREG32(HDMI1_AUDIO_PACKET_CONTROL);
				tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
				WREG32(HDMI1_AUDIO_PACKET_CONTROL, tmp);
			}
		}
	}
}

void r600_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
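	/* Wait and acknowledge irq */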
	mdelay(1);
	r600_irq_ack(rdev);
	r600_disable_interrupt_state(rdev);
}

static u32 r600_get_ih_wptr(struct radeon_device *rdev)
{
	u32 wptr, tmp;

	if (rdev->wb.enabled)
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
	else
		wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
		wptr &= ~RB_OVERFLOW;
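		/* When a ring buffer overflow happens, start parsing interrupts
		 * from the last not-overwritten vector (wptr + 16). Hopefully
		 * this should allow us to catch up.
		 */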
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
	}
	return (wptr & rdev->ih.ptr_mask);
}

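/* r600 IV Ring
 * Each IV ring entry is 128 bits:
 * [7:0]    - interrupt source id
 * [31:8]   - reserved
 * [59:32]  - interrupt source data
 * [127:60] - reserved
 *
 * Source IDs and source data decoded below:
 * src_id  src_data  description
 *      1      0, 1  D1 vblank, D1 vline
 *      5      0, 1  D2 vblank, D2 vline
 *      9         -  D1 page flip
 *     11         -  D2 page flip
 *     19         0  HPD1
 *     19         1  HPD2
 *     19         4  HPD3
 *     19         5  HPD4
 *     19        10  HPD5
 *     19        12  HPD6
 *     21         4  HDMI0
 *     21         5  HDMI1
 *    124         -  UVD
 *    176         -  CP_INT in ring buffer
 *    177         -  CP_INT in IB1
 *    178         -  CP_INT in IB2
 *    181         -  EOP interrupt
 *    224         -  DMA trap
 *    230         -  thermal low to high
 *    231         -  thermal high to low
 *    233         -  GUI idle
 */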
int r600_irq_process(struct radeon_device *rdev)
{
	u32 wptr;
	u32 rptr;
	u32 src_id, src_data;
	u32 ring_index;
	bool queue_hotplug = false;
	bool queue_hdmi = false;
	bool queue_thermal = false;

	if (!rdev->ih.enabled || rdev->shutdown)
		return IRQ_NONE;

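	/* No MSIs, need a dummy read to flush PCI DMAs */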
	if (!rdev->msi_enabled)
		RREG32(IH_RB_WPTR);

	wptr = r600_get_ih_wptr(rdev);

restart_ih:
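	/* is somebody else already processing irqs? */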
	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;

	rptr = rdev->ih.rptr;
	DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);

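	/* Order reading of wptr vs. reading of IH ring data */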
	rmb();

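	/* display interrupts */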
	r600_irq_ack(rdev);

	while (rptr != wptr) {
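		/* wptr/rptr are in bytes! */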
		ring_index = rptr / 4;
		src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;

		switch (src_id) {
		case 1: /* D1 vblank/vline */
			switch (src_data) {
			case 0: /* D1 vblank */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[0]) {
					drm_handle_vblank(rdev->ddev, 0);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[0]))
					radeon_crtc_handle_vblank(rdev, 0);
				rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D1 vblank\n");

				break;
			case 1: /* D1 vline */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D1 vline\n");

				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 5: /* D2 vblank/vline */
			switch (src_data) {
			case 0: /* D2 vblank */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[1]) {
					drm_handle_vblank(rdev->ddev, 1);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[1]))
					radeon_crtc_handle_vblank(rdev, 1);
				rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D2 vblank\n");

				break;
			case 1: /* D2 vline */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D2 vline\n");

				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 9: /* D1 pflip */
			DRM_DEBUG("IH: D1 flip\n");
			if (radeon_use_pflipirq > 0)
				radeon_crtc_handle_flip(rdev, 0);
			break;
		case 11: /* D2 pflip */
			DRM_DEBUG("IH: D2 flip\n");
			if (radeon_use_pflipirq > 0)
				radeon_crtc_handle_flip(rdev, 1);
			break;
		case 19: /* HPD/DAC hotplug */
			switch (src_data) {
			case 0:
				if (!(rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT))
					DRM_DEBUG("IH: HPD1 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD1_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD1\n");
				break;
			case 1:
				if (!(rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT))
					DRM_DEBUG("IH: HPD2 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD2_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD2\n");
				break;
			case 4:
				if (!(rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT))
					DRM_DEBUG("IH: HPD3 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD3_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD3\n");
				break;
			case 5:
				if (!(rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT))
					DRM_DEBUG("IH: HPD4 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD4_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD4\n");
				break;
			case 10:
				if (!(rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT))
					DRM_DEBUG("IH: HPD5 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD5_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD5\n");
				break;
			case 12:
				if (!(rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT))
					DRM_DEBUG("IH: HPD6 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD6_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD6\n");

				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 21: /* hdmi */
			switch (src_data) {
			case 4:
				if (!(rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: HDMI0 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.hdmi0_status &= ~HDMI0_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI0\n");

				break;
			case 5:
				if (!(rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: HDMI1 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.hdmi1_status &= ~HDMI0_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI1\n");

				break;
			default:
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 124: /* UVD */
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
			break;
		case 176: /* CP_INT in ring buffer */
		case 177: /* CP_INT in IB1 */
		case 178: /* CP_INT in IB2 */
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 181: /* CP EOP event */
			DRM_DEBUG("IH: CP EOP\n");
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 224: /* DMA trap event */
			DRM_DEBUG("IH: DMA trap\n");
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
			break;
		case 230: /* thermal low to high */
			DRM_DEBUG("IH: thermal low to high\n");
			rdev->pm.dpm.thermal.high_to_low = false;
			queue_thermal = true;
			break;
		case 231: /* thermal high to low */
			DRM_DEBUG("IH: thermal high to low\n");
			rdev->pm.dpm.thermal.high_to_low = true;
			queue_thermal = true;
			break;
		case 233: /* GUI idle */
			DRM_DEBUG("IH: GUI idle\n");
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}

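		/* wptr/rptr are in bytes! */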
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
		WREG32(IH_RB_RPTR, rptr);
	}
	if (queue_hotplug)
		schedule_delayed_work(&rdev->hotplug_work, 0);
	if (queue_hdmi)
		schedule_work(&rdev->audio_work);
	if (queue_thermal && rdev->pm.dpm_enabled)
		schedule_work(&rdev->pm.dpm.thermal.work);
	rdev->ih.rptr = rptr;
	atomic_set(&rdev->ih.lock, 0);

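	/* make sure wptr hasn't changed while processing */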
	wptr = r600_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}

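/*
 * Debugfs info
 */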
#if defined(CONFIG_DEBUG_FS)

static int r600_debugfs_mc_info_show(struct seq_file *m, void *unused)
{
	struct radeon_device *rdev = (struct radeon_device *)m->private;

	DREG32_SYS(m, rdev, R_000E50_SRBM_STATUS);
	DREG32_SYS(m, rdev, VM_L2_STATUS);
	return 0;
}

DEFINE_SHOW_ATTRIBUTE(r600_debugfs_mc_info);
#endif

static void r600_debugfs_mc_info_init(struct radeon_device *rdev)
{
#if defined(CONFIG_DEBUG_FS)
	struct dentry *root = rdev->ddev->primary->debugfs_root;

	debugfs_create_file("r600_mc_info", 0444, root, rdev,
			    &r600_debugfs_mc_info_fops);

#endif
}

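/**
 * r600_mmio_hdp_flush - flush Host Data Path cache via MMIO
 * @rdev: radeon_device pointer
 *
 * Make sure CPU writes that went through the HDP cache reach VRAM
 * before the GPU reads them, using MMIO register writes.
 */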
void r600_mmio_hdp_flush(struct radeon_device *rdev)
{
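	/* r7xx hw bug.  Write to HDP_DEBUG1 followed by an fb read
	 * rather than a write to HDP_REG_COHERENCY_FLUSH_CNTL.
	 * This seems to cause problems on some AGP cards. Just use the old
	 * method for them.
	 */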
	if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
	    rdev->vram_scratch.ptr && !(rdev->flags & RADEON_IS_AGP)) {
		void __iomem *ptr = (void __iomem *)rdev->vram_scratch.ptr;

		WREG32(HDP_DEBUG1, 0);
		readl(ptr);
	} else
		WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
}

void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes)
{
	u32 link_width_cntl, mask;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

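	/* x2 cards have a special sequence */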
	if (ASIC_IS_X2(rdev))
		return;

	radeon_gui_idle(rdev);

	switch (lanes) {
	case 0:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X0;
		break;
	case 1:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X1;
		break;
	case 2:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X2;
		break;
	case 4:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X4;
		break;
	case 8:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X8;
		break;
	case 12:
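		/* not actually supported */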
		mask = RADEON_PCIE_LC_LINK_WIDTH_X12;
		break;
	case 16:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X16;
		break;
	default:
		DRM_ERROR("invalid pcie lane request: %d\n", lanes);
		return;
	}

	link_width_cntl = RREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL);
	link_width_cntl &= ~RADEON_PCIE_LC_LINK_WIDTH_MASK;
	link_width_cntl |= mask << RADEON_PCIE_LC_LINK_WIDTH_SHIFT;
	link_width_cntl |= (RADEON_PCIE_LC_RECONFIG_NOW |
			    R600_PCIE_LC_RECONFIG_ARC_MISSING_ESCAPE);

	WREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
}

int r600_get_pcie_lanes(struct radeon_device *rdev)
{
	u32 link_width_cntl;

	if (rdev->flags & RADEON_IS_IGP)
		return 0;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return 0;

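	/* x2 cards have a special sequence */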
	if (ASIC_IS_X2(rdev))
		return 0;

	radeon_gui_idle(rdev);

	link_width_cntl = RREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL);

	switch ((link_width_cntl & RADEON_PCIE_LC_LINK_WIDTH_RD_MASK) >> RADEON_PCIE_LC_LINK_WIDTH_RD_SHIFT) {
	case RADEON_PCIE_LC_LINK_WIDTH_X1:
		return 1;
	case RADEON_PCIE_LC_LINK_WIDTH_X2:
		return 2;
	case RADEON_PCIE_LC_LINK_WIDTH_X4:
		return 4;
	case RADEON_PCIE_LC_LINK_WIDTH_X8:
		return 8;
	case RADEON_PCIE_LC_LINK_WIDTH_X12:
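		/* not actually supported */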
		return 12;
	case RADEON_PCIE_LC_LINK_WIDTH_X0:
	case RADEON_PCIE_LC_LINK_WIDTH_X16:
	default:
		return 16;
	}
}

static void r600_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, lanes, speed_cntl, training_cntl, tmp;
	u16 link_cntl2;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

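	/* x2 cards have a special sequence */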
	if (ASIC_IS_X2(rdev))
		return;

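	/* only RV6xx+ chips are supported */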
	if (rdev->family <= CHIP_R600)
		return;

	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
	    (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

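	/* 55nm r6xx asics */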
	if ((rdev->family == CHIP_RV670) ||
	    (rdev->family == CHIP_RV620) ||
	    (rdev->family == CHIP_RV635)) {
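		/* advertise upconfig capability */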
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		if (link_width_cntl & LC_RENEGOTIATION_SUPPORT) {
			lanes = (link_width_cntl & LC_LINK_WIDTH_RD_MASK) >> LC_LINK_WIDTH_RD_SHIFT;
			link_width_cntl &= ~(LC_LINK_WIDTH_MASK |
					     LC_RECONFIG_ARC_MISSING_ESCAPE);
			link_width_cntl |= lanes | LC_RECONFIG_NOW | LC_RENEGOTIATE_EN;
			WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		} else {
			link_width_cntl |= LC_UPCONFIGURE_DIS;
			WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		}
	}

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

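		/* 55nm r6xx asics */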
		if ((rdev->family == CHIP_RV670) ||
		    (rdev->family == CHIP_RV620) ||
		    (rdev->family == CHIP_RV635)) {
			WREG32(MM_CFGREGS_CNTL, 0x8);
			link_cntl2 = RREG32(0x4088);
			WREG32(MM_CFGREGS_CNTL, 0);
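			/* not supported yet */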
			if (link_cntl2 & SELECTABLE_DEEMPHASIS)
				return;
		}

		speed_cntl &= ~LC_SPEED_CHANGE_ATTEMPTS_ALLOWED_MASK;
		speed_cntl |= (0x3 << LC_SPEED_CHANGE_ATTEMPTS_ALLOWED_SHIFT);
		speed_cntl &= ~LC_VOLTAGE_TIMER_SEL_MASK;
		speed_cntl &= ~LC_FORCE_DIS_HW_SPEED_CHANGE;
		speed_cntl |= LC_FORCE_EN_HW_SPEED_CHANGE;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		tmp = RREG32(0x541c);
		WREG32(0x541c, tmp | 0x8);
		WREG32(MM_CFGREGS_CNTL, MM_WR_TO_CFG_EN);
		link_cntl2 = RREG16(0x4088);
		link_cntl2 &= ~TARGET_LINK_SPEED_MASK;
		link_cntl2 |= 0x2;
		WREG16(0x4088, link_cntl2);
		WREG32(MM_CFGREGS_CNTL, 0);

		if ((rdev->family == CHIP_RV670) ||
		    (rdev->family == CHIP_RV620) ||
		    (rdev->family == CHIP_RV635)) {
			training_cntl = RREG32_PCIE_PORT(PCIE_LC_TRAINING_CNTL);
			training_cntl &= ~LC_POINT_7_PLUS_EN;
			WREG32_PCIE_PORT(PCIE_LC_TRAINING_CNTL, training_cntl);
		} else {
			speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
			speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
		}

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: upconfig should likely only be disabled for certain
		 * gen1 bridges; keep it unconditionally disabled for now.
		 */
		link_width_cntl |= LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}

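/**
 * r600_get_gpu_clock_counter - return GPU clock counter snapshot
 *
 * @rdev: radeon_device pointer
 *
 * Fetches a GPU clock counter snapshot, serialized against other
 * readers by gpu_clock_mutex.
 * Returns the 64 bit clock counter snapshot.
 */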
uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev)
{
	uint64_t clock;

	mutex_lock(&rdev->gpu_clock_mutex);
	WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
	clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
		((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
	mutex_unlock(&rdev->gpu_clock_mutex);
	return clock;
}