/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/firmware.h>
#include <linux/module.h>
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_audio.h"
#include "radeon_mode.h"
#include "r600d.h"
#include "atom.h"
#include "avivod.h"
#include "radeon_ucode.h"

/* Firmware Names */
MODULE_FIRMWARE("radeon/R600_pfp.bin");
MODULE_FIRMWARE("radeon/R600_me.bin");
MODULE_FIRMWARE("radeon/RV610_pfp.bin");
MODULE_FIRMWARE("radeon/RV610_me.bin");
MODULE_FIRMWARE("radeon/RV630_pfp.bin");
MODULE_FIRMWARE("radeon/RV630_me.bin");
MODULE_FIRMWARE("radeon/RV620_pfp.bin");
MODULE_FIRMWARE("radeon/RV620_me.bin");
MODULE_FIRMWARE("radeon/RV635_pfp.bin");
MODULE_FIRMWARE("radeon/RV635_me.bin");
MODULE_FIRMWARE("radeon/RV670_pfp.bin");
MODULE_FIRMWARE("radeon/RV670_me.bin");
MODULE_FIRMWARE("radeon/RS780_pfp.bin");
MODULE_FIRMWARE("radeon/RS780_me.bin");
MODULE_FIRMWARE("radeon/RV770_pfp.bin");
MODULE_FIRMWARE("radeon/RV770_me.bin");
MODULE_FIRMWARE("radeon/RV770_smc.bin");
MODULE_FIRMWARE("radeon/RV730_pfp.bin");
MODULE_FIRMWARE("radeon/RV730_me.bin");
MODULE_FIRMWARE("radeon/RV730_smc.bin");
MODULE_FIRMWARE("radeon/RV740_smc.bin");
MODULE_FIRMWARE("radeon/RV710_pfp.bin");
MODULE_FIRMWARE("radeon/RV710_me.bin");
MODULE_FIRMWARE("radeon/RV710_smc.bin");
MODULE_FIRMWARE("radeon/R600_rlc.bin");
MODULE_FIRMWARE("radeon/R700_rlc.bin");
MODULE_FIRMWARE("radeon/CEDAR_pfp.bin");
MODULE_FIRMWARE("radeon/CEDAR_me.bin");
MODULE_FIRMWARE("radeon/CEDAR_rlc.bin");
MODULE_FIRMWARE("radeon/CEDAR_smc.bin");
MODULE_FIRMWARE("radeon/REDWOOD_pfp.bin");
MODULE_FIRMWARE("radeon/REDWOOD_me.bin");
MODULE_FIRMWARE("radeon/REDWOOD_rlc.bin");
MODULE_FIRMWARE("radeon/REDWOOD_smc.bin");
MODULE_FIRMWARE("radeon/JUNIPER_pfp.bin");
MODULE_FIRMWARE("radeon/JUNIPER_me.bin");
MODULE_FIRMWARE("radeon/JUNIPER_rlc.bin");
MODULE_FIRMWARE("radeon/JUNIPER_smc.bin");
MODULE_FIRMWARE("radeon/CYPRESS_pfp.bin");
MODULE_FIRMWARE("radeon/CYPRESS_me.bin");
MODULE_FIRMWARE("radeon/CYPRESS_rlc.bin");
MODULE_FIRMWARE("radeon/CYPRESS_smc.bin");
MODULE_FIRMWARE("radeon/PALM_pfp.bin");
MODULE_FIRMWARE("radeon/PALM_me.bin");
MODULE_FIRMWARE("radeon/SUMO_rlc.bin");
MODULE_FIRMWARE("radeon/SUMO_pfp.bin");
MODULE_FIRMWARE("radeon/SUMO_me.bin");
MODULE_FIRMWARE("radeon/SUMO2_pfp.bin");
MODULE_FIRMWARE("radeon/SUMO2_me.bin");
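/* Per-CRTC register offsets: the D2 block mirrors the D1 layout, so the
 * distance between the two H_TOTAL registers gives the stride between
 * the CRTC0 and CRTC1 register blocks.
 */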
static const u32 crtc_offsets[2] =
{
	0,
	AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL
};

int r600_debugfs_mc_info_init(struct radeon_device *rdev);

/* r600,rv610,rv630,rv620,rv635,rv670 */
int r600_mc_wait_for_idle(struct radeon_device *rdev);
static void r600_gpu_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
void r600_irq_disable(struct radeon_device *rdev);
static void r600_pcie_gen2_enable(struct radeon_device *rdev);
extern int evergreen_rlc_resume(struct radeon_device *rdev);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);

/*
 * Indirect registers accessor
 */
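/* The RCU and UVD-context register files sit behind INDEX/DATA register
 * pairs; each accessor holds a dedicated spinlock so the two-step
 * access (write index, then read/write data) stays atomic.
 */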
u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->rcu_idx_lock, flags);
	WREG32(R600_RCU_INDEX, ((reg) & 0x1fff));
	r = RREG32(R600_RCU_DATA);
	spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags);
	return r;
}

void r600_rcu_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->rcu_idx_lock, flags);
	WREG32(R600_RCU_INDEX, ((reg) & 0x1fff));
	WREG32(R600_RCU_DATA, (v));
	spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags);
}

u32 r600_uvd_ctx_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->uvd_idx_lock, flags);
	WREG32(R600_UVD_CTX_INDEX, ((reg) & 0x1ff));
	r = RREG32(R600_UVD_CTX_DATA);
	spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags);
	return r;
}

void r600_uvd_ctx_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->uvd_idx_lock, flags);
	WREG32(R600_UVD_CTX_INDEX, ((reg) & 0x1ff));
	WREG32(R600_UVD_CTX_DATA, (v));
	spin_unlock_irqrestore(&rdev->uvd_idx_lock, flags);
}

/**
 * r600_get_allowed_info_register - fetch the register for the info ioctl
 *
 * @rdev: radeon_device pointer
 * @reg: register offset in bytes
 * @val: register value
 *
 * Returns 0 for success or -EINVAL for an invalid register
 *
 */
int r600_get_allowed_info_register(struct radeon_device *rdev,
				   u32 reg, u32 *val)
{
	switch (reg) {
	case GRBM_STATUS:
	case GRBM_STATUS2:
	case R_000E50_SRBM_STATUS:
	case DMA_STATUS_REG:
	case UVD_STATUS:
		*val = RREG32(reg);
		return 0;
	default:
		return -EINVAL;
	}
}

/**
 * r600_get_xclk - get the xclk
 *
 * @rdev: radeon_device pointer
 *
 * Returns the reference clock used by the gfx engine
 * (r6xx, IGPs, APUs).
 */
u32 r600_get_xclk(struct radeon_device *rdev)
{
	return rdev->clock.spll.reference_freq;
}

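/*
 * r600_set_uvd_clocks - program the UPLL that feeds the UVD block.
 * The PLL is first switched to bypass; if both requested clocks are
 * zero it is simply put to sleep, otherwise the dividers computed by
 * radeon_uvd_calc_upll_dividers() are programmed and VCLK/DCLK are
 * switched back to the PLL output.
 */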
int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	unsigned fb_div = 0, ref_div, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* assert BYPASS_EN, deassert UPLL_RESET, UPLL_SLEEP and UPLL_CTLREQ */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~(
		 UPLL_RESET_MASK | UPLL_SLEEP_MASK | UPLL_CTLREQ_MASK));

	if (rdev->family >= CHIP_RS780)
		WREG32_P(GFX_MACRO_BYPASS_CNTL, UPLL_BYPASS_CNTL,
			 ~UPLL_BYPASS_CNTL);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	if (rdev->clock.spll.reference_freq == 10000)
		ref_div = 34;
	else
		ref_div = 4;

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000,
					  ref_div + 1, 0xFFF, 2, 30, ~0,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	if (rdev->family >= CHIP_RV670 && rdev->family < CHIP_RS780)
		fb_div >>= 1;
	else
		fb_div |= 1;

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* For RS780 we have to choose ref clk */
	if (rdev->family >= CHIP_RS780)
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_REFCLK_SRC_SEL_MASK,
			 ~UPLL_REFCLK_SRC_SEL_MASK);

	/* set the required fb, ref and post divider values */
	WREG32_P(CG_UPLL_FUNC_CNTL,
		 UPLL_FB_DIV(fb_div) |
		 UPLL_REF_DIV(ref_div),
		 ~(UPLL_FB_DIV_MASK | UPLL_REF_DIV_MASK));
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 UPLL_SW_HILEN(vclk_div >> 1) |
		 UPLL_SW_LOLEN((vclk_div >> 1) + (vclk_div & 1)) |
		 UPLL_SW_HILEN2(dclk_div >> 1) |
		 UPLL_SW_LOLEN2((dclk_div >> 1) + (dclk_div & 1)) |
		 UPLL_DIVEN_MASK | UPLL_DIVEN2_MASK,
		 ~UPLL_SW_MASK);

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* deassert BYPASS EN */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	if (rdev->family >= CHIP_RS780)
		WREG32_P(GFX_MACRO_BYPASS_CNTL, 0, ~UPLL_BYPASS_CNTL);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}

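/*
 * dce3_program_fmt - configure the FMT block for the CRTC feeding this
 * encoder, choosing truncation or spatial dithering based on the
 * monitor's bit depth and the connector's dither setting.
 */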
void dce3_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= FMT_SPATIAL_DITHER_EN;
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}

/* get temperature in millidegrees */
int rv6xx_get_temp(struct radeon_device *rdev)
{
	u32 temp = (RREG32(CG_THERMAL_STATUS) & ASIC_T_MASK) >>
		ASIC_T_SHIFT;
	int actual_temp = temp & 0xff;

	if (temp & 0x100)
		actual_temp -= 256;

	return actual_temp * 1000;
}

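/*
 * r600_pm_get_dynpm_state - select the power state and clock mode for
 * the next dynamic PM transition.  IGPs and R600 step between whole
 * power states; later asics keep one state and step through its clock
 * modes instead.
 */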
void r600_pm_get_dynpm_state(struct radeon_device *rdev)
{
	int i;

	rdev->pm.dynpm_can_upclock = true;
	rdev->pm.dynpm_can_downclock = true;

	/* power state array is low to high, default is first (0) */
	if ((rdev->flags & RADEON_IS_IGP) || (rdev->family == CHIP_R600)) {
		int min_power_state_index = 0;

		if (rdev->pm.num_power_states > 2)
			min_power_state_index = 1;

		switch (rdev->pm.dynpm_planned_action) {
		case DYNPM_ACTION_MINIMUM:
			rdev->pm.requested_power_state_index = min_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_downclock = false;
			break;
		case DYNPM_ACTION_DOWNCLOCK:
			if (rdev->pm.current_power_state_index == min_power_state_index) {
				rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
				rdev->pm.dynpm_can_downclock = false;
			} else {
				if (rdev->pm.active_crtc_count > 1) {
					for (i = 0; i < rdev->pm.num_power_states; i++) {
						if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
							continue;
						else if (i >= rdev->pm.current_power_state_index) {
							rdev->pm.requested_power_state_index =
								rdev->pm.current_power_state_index;
							break;
						} else {
							rdev->pm.requested_power_state_index = i;
							break;
						}
					}
				} else {
					if (rdev->pm.current_power_state_index == 0)
						rdev->pm.requested_power_state_index =
							rdev->pm.num_power_states - 1;
					else
						rdev->pm.requested_power_state_index =
							rdev->pm.current_power_state_index - 1;
				}
			}
			rdev->pm.requested_clock_mode_index = 0;
			/* don't use the power state if crtcs are active and no display flag is set */
			if ((rdev->pm.active_crtc_count > 0) &&
			    (rdev->pm.power_state[rdev->pm.requested_power_state_index].
			     clock_info[rdev->pm.requested_clock_mode_index].flags &
			     RADEON_PM_MODE_NO_DISPLAY)) {
				rdev->pm.requested_power_state_index++;
			}
			break;
		case DYNPM_ACTION_UPCLOCK:
			if (rdev->pm.current_power_state_index == (rdev->pm.num_power_states - 1)) {
				rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
				rdev->pm.dynpm_can_upclock = false;
			} else {
				if (rdev->pm.active_crtc_count > 1) {
					for (i = (rdev->pm.num_power_states - 1); i >= 0; i--) {
						if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
							continue;
						else if (i <= rdev->pm.current_power_state_index) {
							rdev->pm.requested_power_state_index =
								rdev->pm.current_power_state_index;
							break;
						} else {
							rdev->pm.requested_power_state_index = i;
							break;
						}
					}
				} else
					rdev->pm.requested_power_state_index =
						rdev->pm.current_power_state_index + 1;
			}
			rdev->pm.requested_clock_mode_index = 0;
			break;
		case DYNPM_ACTION_DEFAULT:
			rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_upclock = false;
			break;
		case DYNPM_ACTION_NONE:
		default:
			DRM_ERROR("Requested mode for not defined action\n");
			return;
		}
	} else {
		/* XXX select a power state based on AC/DC, single/dualhead, etc. */
		/* for now just select the first power state and switch between clock modes */
		/* power state array is low to high, default is first (0) */
		if (rdev->pm.active_crtc_count > 1) {
			rdev->pm.requested_power_state_index = -1;
			/* start at 1 as we're sure the first state is the default state */
			for (i = 1; i < rdev->pm.num_power_states; i++) {
				if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
					continue;
				else if ((rdev->pm.power_state[i].type == POWER_STATE_TYPE_PERFORMANCE) ||
					 (rdev->pm.power_state[i].type == POWER_STATE_TYPE_BATTERY)) {
					rdev->pm.requested_power_state_index = i;
					break;
				}
			}
			/* if nothing selected, grab the default state. */
			if (rdev->pm.requested_power_state_index == -1)
				rdev->pm.requested_power_state_index = 0;
		} else
			rdev->pm.requested_power_state_index = 1;

		switch (rdev->pm.dynpm_planned_action) {
		case DYNPM_ACTION_MINIMUM:
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_downclock = false;
			break;
		case DYNPM_ACTION_DOWNCLOCK:
			if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
				if (rdev->pm.current_clock_mode_index == 0) {
					rdev->pm.requested_clock_mode_index = 0;
					rdev->pm.dynpm_can_downclock = false;
				} else
					rdev->pm.requested_clock_mode_index =
						rdev->pm.current_clock_mode_index - 1;
			} else {
				rdev->pm.requested_clock_mode_index = 0;
				rdev->pm.dynpm_can_downclock = false;
			}
			/* don't use the power state if crtcs are active and no display flag is set */
			if ((rdev->pm.active_crtc_count > 0) &&
			    (rdev->pm.power_state[rdev->pm.requested_power_state_index].
			     clock_info[rdev->pm.requested_clock_mode_index].flags &
			     RADEON_PM_MODE_NO_DISPLAY)) {
				rdev->pm.requested_clock_mode_index++;
			}
			break;
		case DYNPM_ACTION_UPCLOCK:
			if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
				if (rdev->pm.current_clock_mode_index ==
				    (rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1)) {
					rdev->pm.requested_clock_mode_index = rdev->pm.current_clock_mode_index;
					rdev->pm.dynpm_can_upclock = false;
				} else
					rdev->pm.requested_clock_mode_index =
						rdev->pm.current_clock_mode_index + 1;
			} else {
				rdev->pm.requested_clock_mode_index =
					rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1;
				rdev->pm.dynpm_can_upclock = false;
			}
			break;
		case DYNPM_ACTION_DEFAULT:
			rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_upclock = false;
			break;
		case DYNPM_ACTION_NONE:
		default:
			DRM_ERROR("Requested mode for not defined action\n");
			return;
		}
	}

	DRM_DEBUG_DRIVER("Requested: e: %d m: %d p: %d\n",
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 clock_info[rdev->pm.requested_clock_mode_index].sclk,
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 clock_info[rdev->pm.requested_clock_mode_index].mclk,
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 pcie_lanes);
}

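/*
 * rs780_pm_init_profile - fill the PM profile table for RS780/RS880.
 * Each profile maps the dpms-on/dpms-off cases to a power state and
 * clock mode index; the mapping depends on how many power states the
 * vbios exposes.
 */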
void rs780_pm_init_profile(struct radeon_device *rdev)
{
	if (rdev->pm.num_power_states == 2) {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else if (rdev->pm.num_power_states == 3) {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 3;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 3;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	}
}

void r600_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	if (rdev->family == CHIP_R600) {
		/* XXX */
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else {
		if (rdev->pm.num_power_states < 4) {
			/* default */
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
			/* low sh */
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
			/* mid sh */
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
			/* high sh */
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
			/* low mh */
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
			/* mid mh */
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
			/* high mh */
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
		} else {
			/* default */
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
			/* low sh */
			if (rdev->flags & RADEON_IS_MOBILITY)
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
			else
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
			/* mid sh */
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
			/* high sh */
			idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
			/* low mh */
			if (rdev->flags & RADEON_IS_MOBILITY)
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1);
			else
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
			/* mid mh */
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
			/* high mh */
			idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
		}
	}
}
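/*
 * r600_pm_misc - apply the non-clock parts of the requested power state;
 * on r6xx that is just the VDDC voltage, set through the atom tables
 * when it differs from the current value.
 */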
void r600_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
		/* 0xff01 is a flag rather than an actual voltage */
		if (voltage->voltage == 0xff01)
			return;
		if (voltage->voltage != rdev->pm.current_vddc) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG_DRIVER("Setting: v: %d\n", voltage->voltage);
		}
	}
}

bool r600_gui_idle(struct radeon_device *rdev)
{
	if (RREG32(GRBM_STATUS) & GUI_ACTIVE)
		return false;
	else
		return true;
}

/* hpd for digital panel detect/disconnect */
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	if (ASIC_IS_DCE3(rdev)) {
		switch (hpd) {
		case RADEON_HPD_1:
			if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_2:
			if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_3:
			if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_4:
			if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
			/* DCE 3.2 */
		case RADEON_HPD_5:
			if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_6:
			if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		default:
			break;
		}
	} else {
		switch (hpd) {
		case RADEON_HPD_1:
			if (RREG32(DC_HOT_PLUG_DETECT1_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_2:
			if (RREG32(DC_HOT_PLUG_DETECT2_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_3:
			if (RREG32(DC_HOT_PLUG_DETECT3_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		default:
			break;
		}
	}
	return connected;
}

void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = r600_hpd_sense(rdev, hpd);

	if (ASIC_IS_DCE3(rdev)) {
		switch (hpd) {
		case RADEON_HPD_1:
			tmp = RREG32(DC_HPD1_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD1_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			tmp = RREG32(DC_HPD2_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD2_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			tmp = RREG32(DC_HPD3_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD3_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_4:
			tmp = RREG32(DC_HPD4_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD4_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_5:
			tmp = RREG32(DC_HPD5_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD5_INT_CONTROL, tmp);
			break;
			/* DCE 3.2 */
		case RADEON_HPD_6:
			tmp = RREG32(DC_HPD6_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD6_INT_CONTROL, tmp);
			break;
		default:
			break;
		}
	} else {
		switch (hpd) {
		case RADEON_HPD_1:
			tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
			break;
		default:
			break;
		}
	}
}

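/*
 * r600_hpd_init - enable the hotplug detect pins, program their polarity
 * to match what is currently connected, and unmask the corresponding
 * HPD interrupts.
 */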
void r600_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned enable = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);

		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
			/* Don't enable hpd on eDP or LVDS: it breaks the
			 * DP aux channel on iMacs (see
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143)
			 * and causes interrupt storms during dpms.
			 */
			continue;
		}
		if (ASIC_IS_DCE3(rdev)) {
			u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) | DC_HPDx_RX_INT_TIMER(0xfa);
			if (ASIC_IS_DCE32(rdev))
				tmp |= DC_HPDx_EN;

			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HPD1_CONTROL, tmp);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HPD2_CONTROL, tmp);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HPD3_CONTROL, tmp);
				break;
			case RADEON_HPD_4:
				WREG32(DC_HPD4_CONTROL, tmp);
				break;
				/* DCE 3.2 */
			case RADEON_HPD_5:
				WREG32(DC_HPD5_CONTROL, tmp);
				break;
			case RADEON_HPD_6:
				WREG32(DC_HPD6_CONTROL, tmp);
				break;
			default:
				break;
			}
		} else {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HOT_PLUG_DETECT1_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HOT_PLUG_DETECT2_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HOT_PLUG_DETECT3_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			default:
				break;
			}
		}
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			enable |= 1 << radeon_connector->hpd.hpd;
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
	}
	radeon_irq_kms_enable_hpd(rdev, enable);
}

void r600_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned disable = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		if (ASIC_IS_DCE3(rdev)) {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HPD1_CONTROL, 0);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HPD2_CONTROL, 0);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HPD3_CONTROL, 0);
				break;
			case RADEON_HPD_4:
				WREG32(DC_HPD4_CONTROL, 0);
				break;
				/* DCE 3.2 */
			case RADEON_HPD_5:
				WREG32(DC_HPD5_CONTROL, 0);
				break;
			case RADEON_HPD_6:
				WREG32(DC_HPD6_CONTROL, 0);
				break;
			default:
				break;
			}
		} else {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HOT_PLUG_DETECT1_CONTROL, 0);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HOT_PLUG_DETECT2_CONTROL, 0);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HOT_PLUG_DETECT3_CONTROL, 0);
				break;
			default:
				break;
			}
		}
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			disable |= 1 << radeon_connector->hpd.hpd;
	}
	radeon_irq_kms_disable_hpd(rdev, disable);
}

/*
 * R600 PCIE GART
 */
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush hdp cache so updates hit vram */
	if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
	    !(rdev->flags & RADEON_IS_AGP)) {
		void __iomem *ptr = (void *)rdev->gart.ptr;
		u32 tmp;

		/* r7xx hw bug.  Write to HDP_DEBUG1 followed by an fb read
		 * rather than a write to HDP_REG_COHERENCY_FLUSH_CNTL.
		 * This seems to cause problems on some AGP cards. Just use
		 * the old method for them.
		 */
		WREG32(HDP_DEBUG1, 0);
		tmp = readl((void __iomem *)ptr);
	} else
		WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_INVALIDATION_LOW_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_INVALIDATION_HIGH_ADDR, (rdev->mc.gtt_end - 1) >> 12);
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the invalidation response */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			pr_warn("[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}

int r600_pcie_gart_init(struct radeon_device *rdev)
{
	int r;

	if (rdev->gart.robj) {
		WARN(1, "R600 PCIE GART already initialized\n");
		return 0;
	}
	/* Initialize common gart structure */
	r = radeon_gart_init(rdev);
	if (r)
		return r;
	rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;
	return radeon_gart_table_vram_alloc(rdev);
}

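/*
 * r600_pcie_gart_enable - program the L2 cache, L1 TLBs and the VM
 * context 0 page table registers, then flush the TLB.  Faults in the
 * GART range are redirected to the dummy page.
 */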
static int r600_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r, i;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
		ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp | ENABLE_L1_STRICT_ORDERING);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_UVD_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_UVD_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	for (i = 1; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	r600_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}

static void r600_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Disable all tables */
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	/* Disable L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup L1 TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
	      ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_UVD_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_UVD_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}

static void r600_pcie_gart_fini(struct radeon_device *rdev)
{
	radeon_gart_fini(rdev);
	r600_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
}

static void r600_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
		ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp | ENABLE_L1_STRICT_ORDERING);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

int r600_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* poll the MC busy bits in SRBM_STATUS */
		tmp = RREG32(R_000E50_SRBM_STATUS) & 0x3F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg)
{
	unsigned long flags;
	uint32_t r;

	spin_lock_irqsave(&rdev->mc_idx_lock, flags);
	WREG32(R_0028F8_MC_INDEX, S_0028F8_MC_IND_ADDR(reg));
	r = RREG32(R_0028FC_MC_DATA);
	WREG32(R_0028F8_MC_INDEX, ~C_0028F8_MC_IND_ADDR);
	spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
	return r;
}

void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->mc_idx_lock, flags);
	WREG32(R_0028F8_MC_INDEX, S_0028F8_MC_IND_ADDR(reg) |
		S_0028F8_MC_IND_WR_EN(1));
	WREG32(R_0028FC_MC_DATA, v);
	WREG32(R_0028F8_MC_INDEX, 0x7F);
	spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
}

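/*
 * r600_mc_program - reprogram the memory controller's VRAM/AGP aperture
 * layout while the MC is idle and VGA access is locked out, then restore
 * the saved display state.
 */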
static void r600_mc_program(struct radeon_device *rdev)
{
	struct rv515_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	rv515_mc_stop(rdev, &save);
	if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture (doesn't exist before R600) */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 22);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 22);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	rv515_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}

/**
 * r600_vram_gtt_location - try to find VRAM & GTT location
 * @rdev: radeon device structure holding all necessary information
 * @mc: memory controller structure holding memory information
 *
 * Try to place VRAM at the same offset in the GPU's address space as it
 * has in the CPU (PCI) address space, as some GPUs misbehave when VRAM
 * is reprogrammed to a different address.
 *
 * If there is not enough space to fit the invisible VRAM after the
 * aperture, limit the VRAM size to the aperture.
 *
 * When using AGP, place VRAM adjacent to the AGP aperture, since both
 * must live in the same address range.
 */
static void r600_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
{
	u64 size_bf, size_af;

	if (mc->mc_vram_size > 0xE0000000) {
		/* leave room for at least 512M GTT */
		dev_warn(rdev->dev, "limiting VRAM\n");
		mc->real_vram_size = 0xE0000000;
		mc->mc_vram_size = 0xE0000000;
	}
	if (rdev->flags & RADEON_IS_AGP) {
		size_bf = mc->gtt_start;
		size_af = mc->mc_mask - mc->gtt_end;
		if (size_bf > size_af) {
			if (mc->mc_vram_size > size_bf) {
				dev_warn(rdev->dev, "limiting VRAM\n");
				mc->real_vram_size = size_bf;
				mc->mc_vram_size = size_bf;
			}
			mc->vram_start = mc->gtt_start - mc->mc_vram_size;
		} else {
			if (mc->mc_vram_size > size_af) {
				dev_warn(rdev->dev, "limiting VRAM\n");
				mc->real_vram_size = size_af;
				mc->mc_vram_size = size_af;
			}
			mc->vram_start = mc->gtt_end + 1;
		}
		mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
		dev_info(rdev->dev, "VRAM: %lluM 0x%08llX - 0x%08llX (%lluM used)\n",
			 mc->mc_vram_size >> 20, mc->vram_start,
			 mc->vram_end, mc->real_vram_size >> 20);
	} else {
		u64 base = 0;
		if (rdev->flags & RADEON_IS_IGP) {
			base = RREG32(MC_VM_FB_LOCATION) & 0xFFFF;
			base <<= 24;
		}
		radeon_vram_location(rdev, &rdev->mc, base);
		rdev->mc.gtt_base_align = 0;
		radeon_gtt_location(rdev, mc);
	}
}
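/*
 * r600_mc_init - read back the VRAM configuration (type, channel width,
 * sizes, apertures) and place VRAM and GTT; on RS780/RS880 it also tries
 * to use the K8 direct mapping for faster framebuffer access.
 */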
static int r600_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;
	uint32_t h_addr, l_addr;
	unsigned long long k8_addr;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r600_vram_gtt_location(rdev, &rdev->mc);

	if (rdev->flags & RADEON_IS_IGP) {
		rs690_pm_info(rdev);
		rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);

		if (rdev->family == CHIP_RS780 || rdev->family == CHIP_RS880) {
			/* Use K8 direct mapping for fast fb access. */
			rdev->fastfb_working = false;
			h_addr = G_000012_K8_ADDR_EXT(RREG32_MC(R_000012_MC_MISC_UMA_CNTL));
			l_addr = RREG32_MC(R_000011_K8_FB_LOCATION);
			k8_addr = ((unsigned long long)h_addr) << 32 | l_addr;
#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_PAE)
			if (k8_addr + rdev->mc.visible_vram_size < 0x100000000ULL)
#endif
			{
				/* FastFB shall be used with UMA memory. Here it is
				 * simply disabled when sideport memory is present.
				 */
				if (rdev->mc.igp_sideport_enabled == false && radeon_fastfb == 1) {
					DRM_INFO("Direct mapping: aper base at 0x%llx, replaced by direct mapping base 0x%llx.\n",
						 (unsigned long long)rdev->mc.aper_base, k8_addr);
					rdev->mc.aper_base = (resource_size_t)k8_addr;
					rdev->fastfb_working = true;
				}
			}
		}
	}

	radeon_update_bandwidth_info(rdev);
	return 0;
}

int r600_vram_scratch_init(struct radeon_device *rdev)
{
	int r;

	if (rdev->vram_scratch.robj == NULL) {
		r = radeon_bo_create(rdev, RADEON_GPU_PAGE_SIZE,
				     PAGE_SIZE, true, RADEON_GEM_DOMAIN_VRAM,
				     0, NULL, NULL, &rdev->vram_scratch.robj);
		if (r) {
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->vram_scratch.robj,
			  RADEON_GEM_DOMAIN_VRAM, &rdev->vram_scratch.gpu_addr);
	if (r) {
		radeon_bo_unreserve(rdev->vram_scratch.robj);
		return r;
	}
	r = radeon_bo_kmap(rdev->vram_scratch.robj,
			   (void **)&rdev->vram_scratch.ptr);
	if (r)
		radeon_bo_unpin(rdev->vram_scratch.robj);
	radeon_bo_unreserve(rdev->vram_scratch.robj);

	return r;
}

void r600_vram_scratch_fini(struct radeon_device *rdev)
{
	int r;

	if (rdev->vram_scratch.robj == NULL) {
		return;
	}
	r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
	if (likely(r == 0)) {
		radeon_bo_kunmap(rdev->vram_scratch.robj);
		radeon_bo_unpin(rdev->vram_scratch.robj);
		radeon_bo_unreserve(rdev->vram_scratch.robj);
	}
	radeon_bo_unref(&rdev->vram_scratch.robj);
}

void r600_set_bios_scratch_engine_hung(struct radeon_device *rdev, bool hung)
{
	u32 tmp = RREG32(R600_BIOS_3_SCRATCH);

	if (hung)
		tmp |= ATOM_S3_ASIC_GUI_ENGINE_HUNG;
	else
		tmp &= ~ATOM_S3_ASIC_GUI_ENGINE_HUNG;

	WREG32(R600_BIOS_3_SCRATCH, tmp);
}

static void r600_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  R_008010_GRBM_STATUS      = 0x%08X\n",
		 RREG32(R_008010_GRBM_STATUS));
	dev_info(rdev->dev, "  R_008014_GRBM_STATUS2     = 0x%08X\n",
		 RREG32(R_008014_GRBM_STATUS2));
	dev_info(rdev->dev, "  R_000E50_SRBM_STATUS      = 0x%08X\n",
		 RREG32(R_000E50_SRBM_STATUS));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		 RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		 RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		 RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		 RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		 RREG32(DMA_STATUS_REG));
}

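/*
 * r600_is_display_hung - sample the HV counters of all enabled CRTCs up
 * to ten times; a counter that never changes marks the display engine
 * as hung.
 */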
static bool r600_is_display_hung(struct radeon_device *rdev)
{
	u32 crtc_hung = 0;
	u32 crtc_status[2];
	u32 i, j, tmp;

	for (i = 0; i < rdev->num_crtc; i++) {
		if (RREG32(AVIVO_D1CRTC_CONTROL + crtc_offsets[i]) & AVIVO_CRTC_EN) {
			crtc_status[i] = RREG32(AVIVO_D1CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
			crtc_hung |= (1 << i);
		}
	}

	for (j = 0; j < 10; j++) {
		for (i = 0; i < rdev->num_crtc; i++) {
			if (crtc_hung & (1 << i)) {
				tmp = RREG32(AVIVO_D1CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
				if (tmp != crtc_status[i])
					crtc_hung &= ~(1 << i);
			}
		}
		if (crtc_hung == 0)
			return false;
		udelay(100);
	}

	return true;
}

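/*
 * r600_gpu_check_soft_reset - translate the busy bits of the GRBM, DMA
 * and SRBM status registers into a RADEON_RESET_* mask describing which
 * blocks need a soft reset.
 */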
u32 r600_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(R_008010_GRBM_STATUS);
	if (rdev->family >= CHIP_RV770) {
		if (G_008010_PA_BUSY(tmp) | G_008010_SC_BUSY(tmp) |
		    G_008010_SH_BUSY(tmp) | G_008010_SX_BUSY(tmp) |
		    G_008010_TA_BUSY(tmp) | G_008010_VGT_BUSY(tmp) |
		    G_008010_DB03_BUSY(tmp) | G_008010_CB03_BUSY(tmp) |
		    G_008010_SPI03_BUSY(tmp) | G_008010_VGT_BUSY_NO_DMA(tmp))
			reset_mask |= RADEON_RESET_GFX;
	} else {
		if (G_008010_PA_BUSY(tmp) | G_008010_SC_BUSY(tmp) |
		    G_008010_SH_BUSY(tmp) | G_008010_SX_BUSY(tmp) |
		    G_008010_TA03_BUSY(tmp) | G_008010_VGT_BUSY(tmp) |
		    G_008010_DB03_BUSY(tmp) | G_008010_CB03_BUSY(tmp) |
		    G_008010_SPI03_BUSY(tmp) | G_008010_VGT_BUSY_NO_DMA(tmp))
			reset_mask |= RADEON_RESET_GFX;
	}

	if (G_008010_CF_RQ_PENDING(tmp) | G_008010_PF_RQ_PENDING(tmp) |
	    G_008010_CP_BUSY(tmp) | G_008010_CP_COHERENCY_BUSY(tmp))
		reset_mask |= RADEON_RESET_CP;

	if (G_008010_GRBM_EE_BUSY(tmp))
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(R_000E50_SRBM_STATUS);
	if (G_000E50_RLC_RQ_PENDING(tmp) | G_000E50_RLC_BUSY(tmp))
		reset_mask |= RADEON_RESET_RLC;

	if (G_000E50_IH_BUSY(tmp))
		reset_mask |= RADEON_RESET_IH;

	if (G_000E50_SEM_BUSY(tmp))
		reset_mask |= RADEON_RESET_SEM;

	if (G_000E50_GRBM_RQ_PENDING(tmp))
		reset_mask |= RADEON_RESET_GRBM;

	if (G_000E50_VMC_BUSY(tmp))
		reset_mask |= RADEON_RESET_VMC;

	if (G_000E50_MCB_BUSY(tmp) | G_000E50_MCDZ_BUSY(tmp) |
	    G_000E50_MCDY_BUSY(tmp) | G_000E50_MCDX_BUSY(tmp) |
	    G_000E50_MCDW_BUSY(tmp))
		reset_mask |= RADEON_RESET_MC;

	if (r600_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
1678
1679static void r600_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
1680{
1681 struct rv515_mc_save save;
1682 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
1683 u32 tmp;
1684
1685 if (reset_mask == 0)
1686 return;
1687
1688 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
1689
1690 r600_print_gpu_status_regs(rdev);
1691
1692
1693 if (rdev->family >= CHIP_RV770)
1694 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1) | S_0086D8_CP_PFP_HALT(1));
1695 else
1696 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
1697
1698
1699 WREG32(RLC_CNTL, 0);
1700
1701 if (reset_mask & RADEON_RESET_DMA) {
1702
1703 tmp = RREG32(DMA_RB_CNTL);
1704 tmp &= ~DMA_RB_ENABLE;
1705 WREG32(DMA_RB_CNTL, tmp);
1706 }
1707
1708 mdelay(50);
1709
1710 rv515_mc_stop(rdev, &save);
1711 if (r600_mc_wait_for_idle(rdev)) {
1712 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
1713 }
1714
1715 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
1716 if (rdev->family >= CHIP_RV770)
1717 grbm_soft_reset |= S_008020_SOFT_RESET_DB(1) |
1718 S_008020_SOFT_RESET_CB(1) |
1719 S_008020_SOFT_RESET_PA(1) |
1720 S_008020_SOFT_RESET_SC(1) |
1721 S_008020_SOFT_RESET_SPI(1) |
1722 S_008020_SOFT_RESET_SX(1) |
1723 S_008020_SOFT_RESET_SH(1) |
1724 S_008020_SOFT_RESET_TC(1) |
1725 S_008020_SOFT_RESET_TA(1) |
1726 S_008020_SOFT_RESET_VC(1) |
1727 S_008020_SOFT_RESET_VGT(1);
1728 else
1729 grbm_soft_reset |= S_008020_SOFT_RESET_CR(1) |
1730 S_008020_SOFT_RESET_DB(1) |
1731 S_008020_SOFT_RESET_CB(1) |
1732 S_008020_SOFT_RESET_PA(1) |
1733 S_008020_SOFT_RESET_SC(1) |
1734 S_008020_SOFT_RESET_SMX(1) |
1735 S_008020_SOFT_RESET_SPI(1) |
1736 S_008020_SOFT_RESET_SX(1) |
1737 S_008020_SOFT_RESET_SH(1) |
1738 S_008020_SOFT_RESET_TC(1) |
1739 S_008020_SOFT_RESET_TA(1) |
1740 S_008020_SOFT_RESET_VC(1) |
1741 S_008020_SOFT_RESET_VGT(1);
1742 }
1743
1744 if (reset_mask & RADEON_RESET_CP) {
1745 grbm_soft_reset |= S_008020_SOFT_RESET_CP(1) |
1746 S_008020_SOFT_RESET_VGT(1);
1747
1748 srbm_soft_reset |= S_000E60_SOFT_RESET_GRBM(1);
1749 }
1750
1751 if (reset_mask & RADEON_RESET_DMA) {
1752 if (rdev->family >= CHIP_RV770)
1753 srbm_soft_reset |= RV770_SOFT_RESET_DMA;
1754 else
1755 srbm_soft_reset |= SOFT_RESET_DMA;
1756 }
1757
1758 if (reset_mask & RADEON_RESET_RLC)
1759 srbm_soft_reset |= S_000E60_SOFT_RESET_RLC(1);
1760
1761 if (reset_mask & RADEON_RESET_SEM)
1762 srbm_soft_reset |= S_000E60_SOFT_RESET_SEM(1);
1763
1764 if (reset_mask & RADEON_RESET_IH)
1765 srbm_soft_reset |= S_000E60_SOFT_RESET_IH(1);
1766
1767 if (reset_mask & RADEON_RESET_GRBM)
1768 srbm_soft_reset |= S_000E60_SOFT_RESET_GRBM(1);
1769
1770 if (!(rdev->flags & RADEON_IS_IGP)) {
1771 if (reset_mask & RADEON_RESET_MC)
1772 srbm_soft_reset |= S_000E60_SOFT_RESET_MC(1);
1773 }
1774
1775 if (reset_mask & RADEON_RESET_VMC)
1776 srbm_soft_reset |= S_000E60_SOFT_RESET_VMC(1);
1777
1778 if (grbm_soft_reset) {
1779 tmp = RREG32(R_008020_GRBM_SOFT_RESET);
1780 tmp |= grbm_soft_reset;
1781 dev_info(rdev->dev, "R_008020_GRBM_SOFT_RESET=0x%08X\n", tmp);
1782 WREG32(R_008020_GRBM_SOFT_RESET, tmp);
1783 tmp = RREG32(R_008020_GRBM_SOFT_RESET);
1784
1785 udelay(50);
1786
1787 tmp &= ~grbm_soft_reset;
1788 WREG32(R_008020_GRBM_SOFT_RESET, tmp);
1789 tmp = RREG32(R_008020_GRBM_SOFT_RESET);
1790 }
1791
1792 if (srbm_soft_reset) {
1793 tmp = RREG32(SRBM_SOFT_RESET);
1794 tmp |= srbm_soft_reset;
1795 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
1796 WREG32(SRBM_SOFT_RESET, tmp);
1797 tmp = RREG32(SRBM_SOFT_RESET);
1798
1799 udelay(50);
1800
1801 tmp &= ~srbm_soft_reset;
1802 WREG32(SRBM_SOFT_RESET, tmp);
1803 tmp = RREG32(SRBM_SOFT_RESET);
1804 }
1805
1806
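 /* Wait a little for things to settle down */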
1807 mdelay(1);
1808
1809 rv515_mc_resume(rdev, &save);
1810 udelay(50);
1811
1812 r600_print_gpu_status_regs(rdev);
1813}
1814
1815static void r600_gpu_pci_config_reset(struct radeon_device *rdev)
1816{
1817 struct rv515_mc_save save;
1818 u32 tmp, i;
1819
1820 dev_info(rdev->dev, "GPU pci config reset\n");
1824
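 /* Disable CP parsing/prefetching */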
1825 if (rdev->family >= CHIP_RV770)
1826 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1) | S_0086D8_CP_PFP_HALT(1));
1827 else
1828 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
1829
1830
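 /* disable the RLC */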
1831 WREG32(RLC_CNTL, 0);
1832
1833
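 /* Disable DMA */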
1834 tmp = RREG32(DMA_RB_CNTL);
1835 tmp &= ~DMA_RB_ENABLE;
1836 WREG32(DMA_RB_CNTL, tmp);
1837
1838 mdelay(50);
1839
1840
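 /* set mclk/sclk to bypass */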
1841 if (rdev->family >= CHIP_RV770)
1842 rv770_set_clk_bypass_mode(rdev);
1843
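 /* disable BM */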
1844 pci_clear_master(rdev->pdev);
1845
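 /* disable mem access */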
1846 rv515_mc_stop(rdev, &save);
1847 if (r600_mc_wait_for_idle(rdev)) {
1848 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1849 }
1850
1851
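 /* BIF reset workaround.  Not sure if this is needed on 7xx */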
1852 tmp = RREG32(BUS_CNTL);
1853 tmp |= VGA_COHE_SPEC_TIMER_DIS;
1854 WREG32(BUS_CNTL, tmp);
1855
1856 tmp = RREG32(BIF_SCRATCH0);
1857
1858
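 /* reset */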
1859 radeon_pci_config_reset(rdev);
1860 mdelay(1);
1861
1862
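 /* BIF reset workaround.  Not sure if this is needed on 7xx */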
1863 tmp = SOFT_RESET_BIF;
1864 WREG32(SRBM_SOFT_RESET, tmp);
1865 mdelay(1);
1866 WREG32(SRBM_SOFT_RESET, 0);
1867
1868
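 /* wait for asic to come out of reset */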
1869 for (i = 0; i < rdev->usec_timeout; i++) {
1870 if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
1871 break;
1872 udelay(1);
1873 }
1874}
1875
1876int r600_asic_reset(struct radeon_device *rdev, bool hard)
1877{
1878 u32 reset_mask;
1879
1880 if (hard) {
1881 r600_gpu_pci_config_reset(rdev);
1882 return 0;
1883 }
1884
1885 reset_mask = r600_gpu_check_soft_reset(rdev);
1886
1887 if (reset_mask)
1888 r600_set_bios_scratch_engine_hung(rdev, true);
1889
1890
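 /* try soft reset */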
1891 r600_gpu_soft_reset(rdev, reset_mask);
1892
1893 reset_mask = r600_gpu_check_soft_reset(rdev);
1894
1895
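 /* try pci config reset */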
1896 if (reset_mask && radeon_hard_reset)
1897 r600_gpu_pci_config_reset(rdev);
1898
1899 reset_mask = r600_gpu_check_soft_reset(rdev);
1900
1901 if (!reset_mask)
1902 r600_set_bios_scratch_engine_hung(rdev, false);
1903
1904 return 0;
1905}
1915
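/**
 * r600_gfx_is_lockup - Check if the GFX engine is locked up
 *
 * @rdev: radeon_device pointer
 * @ring: radeon_ring structure holding ring information
 *
 * Check if the GFX engine is locked up.
 * Returns true if the engine appears to be locked up, false if not.
 */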
1916bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
1917{
1918 u32 reset_mask = r600_gpu_check_soft_reset(rdev);
1919
1920 if (!(reset_mask & (RADEON_RESET_GFX |
1921 RADEON_RESET_COMPUTE |
1922 RADEON_RESET_CP))) {
1923 radeon_ring_lockup_update(rdev, ring);
1924 return false;
1925 }
1926 return radeon_ring_test_lockup(rdev, ring);
1927}
1928
1929u32 r6xx_remap_render_backend(struct radeon_device *rdev,
1930 u32 tiling_pipe_num,
1931 u32 max_rb_num,
1932 u32 total_max_rb_num,
1933 u32 disabled_rb_mask)
1934{
1935 u32 rendering_pipe_num, rb_num_width, req_rb_num;
1936 u32 pipe_rb_ratio, pipe_rb_remain, tmp;
1937 u32 data = 0, mask = 1 << (max_rb_num - 1);
1938 unsigned i, j;
1939
1940
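 /* mask out the RBs that don't exist on that asic */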
1941 tmp = disabled_rb_mask | ((0xff << max_rb_num) & 0xff);
1942
1943 if ((tmp & 0xff) != 0xff)
1944 disabled_rb_mask = tmp;
1945
1946 rendering_pipe_num = 1 << tiling_pipe_num;
1947 req_rb_num = total_max_rb_num - r600_count_pipe_bits(disabled_rb_mask);
1948 BUG_ON(rendering_pipe_num < req_rb_num);
1949
1950 pipe_rb_ratio = rendering_pipe_num / req_rb_num;
1951 pipe_rb_remain = rendering_pipe_num - pipe_rb_ratio * req_rb_num;
1952
1953 if (rdev->family <= CHIP_RV740) {
1954
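 /* r6xx/r7xx */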
1955 rb_num_width = 2;
1956 } else {
1957
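 /* eg+ */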
1958 rb_num_width = 4;
1959 }
1960
1961 for (i = 0; i < max_rb_num; i++) {
1962 if (!(mask & disabled_rb_mask)) {
1963 for (j = 0; j < pipe_rb_ratio; j++) {
1964 data <<= rb_num_width;
1965 data |= max_rb_num - i - 1;
1966 }
1967 if (pipe_rb_remain) {
1968 data <<= rb_num_width;
1969 data |= max_rb_num - i - 1;
1970 pipe_rb_remain--;
1971 }
1972 }
1973 mask >>= 1;
1974 }
1975
1976 return data;
1977}
1978
1979int r600_count_pipe_bits(uint32_t val)
1980{
1981 return hweight32(val);
1982}
1983
1984static void r600_gpu_init(struct radeon_device *rdev)
1985{
1986 u32 tiling_config;
1987 u32 ramcfg;
1988 u32 cc_gc_shader_pipe_config;
1989 u32 tmp;
1990 int i, j;
1991 u32 sq_config;
1992 u32 sq_gpr_resource_mgmt_1 = 0;
1993 u32 sq_gpr_resource_mgmt_2 = 0;
1994 u32 sq_thread_resource_mgmt = 0;
1995 u32 sq_stack_resource_mgmt_1 = 0;
1996 u32 sq_stack_resource_mgmt_2 = 0;
1997 u32 disabled_rb_mask;
1998
1999 rdev->config.r600.tiling_group_size = 256;
2000 switch (rdev->family) {
2001 case CHIP_R600:
2002 rdev->config.r600.max_pipes = 4;
2003 rdev->config.r600.max_tile_pipes = 8;
2004 rdev->config.r600.max_simds = 4;
2005 rdev->config.r600.max_backends = 4;
2006 rdev->config.r600.max_gprs = 256;
2007 rdev->config.r600.max_threads = 192;
2008 rdev->config.r600.max_stack_entries = 256;
2009 rdev->config.r600.max_hw_contexts = 8;
2010 rdev->config.r600.max_gs_threads = 16;
2011 rdev->config.r600.sx_max_export_size = 128;
2012 rdev->config.r600.sx_max_export_pos_size = 16;
2013 rdev->config.r600.sx_max_export_smx_size = 128;
2014 rdev->config.r600.sq_num_cf_insts = 2;
2015 break;
2016 case CHIP_RV630:
2017 case CHIP_RV635:
2018 rdev->config.r600.max_pipes = 2;
2019 rdev->config.r600.max_tile_pipes = 2;
2020 rdev->config.r600.max_simds = 3;
2021 rdev->config.r600.max_backends = 1;
2022 rdev->config.r600.max_gprs = 128;
2023 rdev->config.r600.max_threads = 192;
2024 rdev->config.r600.max_stack_entries = 128;
2025 rdev->config.r600.max_hw_contexts = 8;
2026 rdev->config.r600.max_gs_threads = 4;
2027 rdev->config.r600.sx_max_export_size = 128;
2028 rdev->config.r600.sx_max_export_pos_size = 16;
2029 rdev->config.r600.sx_max_export_smx_size = 128;
2030 rdev->config.r600.sq_num_cf_insts = 2;
2031 break;
2032 case CHIP_RV610:
2033 case CHIP_RV620:
2034 case CHIP_RS780:
2035 case CHIP_RS880:
2036 rdev->config.r600.max_pipes = 1;
2037 rdev->config.r600.max_tile_pipes = 1;
2038 rdev->config.r600.max_simds = 2;
2039 rdev->config.r600.max_backends = 1;
2040 rdev->config.r600.max_gprs = 128;
2041 rdev->config.r600.max_threads = 192;
2042 rdev->config.r600.max_stack_entries = 128;
2043 rdev->config.r600.max_hw_contexts = 4;
2044 rdev->config.r600.max_gs_threads = 4;
2045 rdev->config.r600.sx_max_export_size = 128;
2046 rdev->config.r600.sx_max_export_pos_size = 16;
2047 rdev->config.r600.sx_max_export_smx_size = 128;
2048 rdev->config.r600.sq_num_cf_insts = 1;
2049 break;
2050 case CHIP_RV670:
2051 rdev->config.r600.max_pipes = 4;
2052 rdev->config.r600.max_tile_pipes = 4;
2053 rdev->config.r600.max_simds = 4;
2054 rdev->config.r600.max_backends = 4;
2055 rdev->config.r600.max_gprs = 192;
2056 rdev->config.r600.max_threads = 192;
2057 rdev->config.r600.max_stack_entries = 256;
2058 rdev->config.r600.max_hw_contexts = 8;
2059 rdev->config.r600.max_gs_threads = 16;
2060 rdev->config.r600.sx_max_export_size = 128;
2061 rdev->config.r600.sx_max_export_pos_size = 16;
2062 rdev->config.r600.sx_max_export_smx_size = 128;
2063 rdev->config.r600.sq_num_cf_insts = 2;
2064 break;
2065 default:
2066 break;
2067 }
2068
2069
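 /* Initialize HDP */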
2070 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2071 WREG32((0x2c14 + j), 0x00000000);
2072 WREG32((0x2c18 + j), 0x00000000);
2073 WREG32((0x2c1c + j), 0x00000000);
2074 WREG32((0x2c20 + j), 0x00000000);
2075 WREG32((0x2c24 + j), 0x00000000);
2076 }
2077
2078 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
2079
2080
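 /* Setup tiling */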
2081 tiling_config = 0;
2082 ramcfg = RREG32(RAMCFG);
2083 switch (rdev->config.r600.max_tile_pipes) {
2084 case 1:
2085 tiling_config |= PIPE_TILING(0);
2086 break;
2087 case 2:
2088 tiling_config |= PIPE_TILING(1);
2089 break;
2090 case 4:
2091 tiling_config |= PIPE_TILING(2);
2092 break;
2093 case 8:
2094 tiling_config |= PIPE_TILING(3);
2095 break;
2096 default:
2097 break;
2098 }
2099 rdev->config.r600.tiling_npipes = rdev->config.r600.max_tile_pipes;
2100 rdev->config.r600.tiling_nbanks = 4 << ((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
2101 tiling_config |= BANK_TILING((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
2102 tiling_config |= GROUP_SIZE((ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
2103
2104 tmp = (ramcfg & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
2105 if (tmp > 3) {
2106 tiling_config |= ROW_TILING(3);
2107 tiling_config |= SAMPLE_SPLIT(3);
2108 } else {
2109 tiling_config |= ROW_TILING(tmp);
2110 tiling_config |= SAMPLE_SPLIT(tmp);
2111 }
2112 tiling_config |= BANK_SWAPS(1);
2113
2114 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0x00ffff00;
2115 tmp = rdev->config.r600.max_simds -
2116 r600_count_pipe_bits((cc_gc_shader_pipe_config >> 16) & R6XX_MAX_SIMDS_MASK);
2117 rdev->config.r600.active_simds = tmp;
2118
2119 disabled_rb_mask = (RREG32(CC_RB_BACKEND_DISABLE) >> 16) & R6XX_MAX_BACKENDS_MASK;
2120 tmp = 0;
2121 for (i = 0; i < rdev->config.r600.max_backends; i++)
2122 tmp |= (1 << i);
2123
2124 if ((disabled_rb_mask & tmp) == tmp) {
2125 for (i = 0; i < rdev->config.r600.max_backends; i++)
2126 disabled_rb_mask &= ~(1 << i);
2127 }
2128 tmp = (tiling_config & PIPE_TILING__MASK) >> PIPE_TILING__SHIFT;
2129 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.r600.max_backends,
2130 R6XX_MAX_BACKENDS, disabled_rb_mask);
2131 tiling_config |= tmp << 16;
2132 rdev->config.r600.backend_map = tmp;
2133
2134 rdev->config.r600.tile_config = tiling_config;
2135 WREG32(GB_TILING_CONFIG, tiling_config);
2136 WREG32(DCP_TILING_CONFIG, tiling_config & 0xffff);
2137 WREG32(HDP_TILING_CONFIG, tiling_config & 0xffff);
2138 WREG32(DMA_TILING_CONFIG, tiling_config & 0xffff);
2139
2140 tmp = R6XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK) >> 8);
2141 WREG32(VGT_OUT_DEALLOC_CNTL, (tmp * 4) & DEALLOC_DIST_MASK);
2142 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((tmp * 4) - 2) & VTX_REUSE_DEPTH_MASK);
2143
2144
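 /* Setup some CP states */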
2145 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) | ROQ_IB2_START(0x2b)));
2146 WREG32(CP_MEQ_THRESHOLDS, (MEQ_END(0x40) | ROQ_END(0x40)));
2147
2148 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO | SYNC_GRADIENT |
2149 SYNC_WALKER | SYNC_ALIGNER));
2150
2151 if (rdev->family == CHIP_RV670)
2152 WREG32(ARB_GDEC_RD_CNTL, 0x00000021);
2153
2154 tmp = RREG32(SX_DEBUG_1);
2155 tmp |= SMX_EVENT_RELEASE;
2156 if (rdev->family > CHIP_R600)
2157 tmp |= ENABLE_NEW_SMX_ADDRESS;
2158 WREG32(SX_DEBUG_1, tmp);
2159
2160 if (((rdev->family) == CHIP_R600) ||
2161 ((rdev->family) == CHIP_RV630) ||
2162 ((rdev->family) == CHIP_RV610) ||
2163 ((rdev->family) == CHIP_RV620) ||
2164 ((rdev->family) == CHIP_RS780) ||
2165 ((rdev->family) == CHIP_RS880)) {
2166 WREG32(DB_DEBUG, PREZ_MUST_WAIT_FOR_POSTZ_DONE);
2167 } else {
2168 WREG32(DB_DEBUG, 0);
2169 }
2170 WREG32(DB_WATERMARKS, (DEPTH_FREE(4) | DEPTH_CACHELINE_FREE(16) |
2171 DEPTH_FLUSH(16) | DEPTH_PENDING_FREE(4)));
2172
2173 WREG32(PA_SC_MULTI_CHIP_CNTL, 0);
2174 WREG32(VGT_NUM_INSTANCES, 0);
2175
2176 WREG32(SPI_CONFIG_CNTL, GPR_WRITE_PRIORITY(0));
2177 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(0));
2178
2179 tmp = RREG32(SQ_MS_FIFO_SIZES);
2180 if (((rdev->family) == CHIP_RV610) ||
2181 ((rdev->family) == CHIP_RV620) ||
2182 ((rdev->family) == CHIP_RS780) ||
2183 ((rdev->family) == CHIP_RS880)) {
2184 tmp = (CACHE_FIFO_SIZE(0xa) |
2185 FETCH_FIFO_HIWATER(0xa) |
2186 DONE_FIFO_HIWATER(0xe0) |
2187 ALU_UPDATE_FIFO_HIWATER(0x8));
2188 } else if (((rdev->family) == CHIP_R600) ||
2189 ((rdev->family) == CHIP_RV630)) {
2190 tmp &= ~DONE_FIFO_HIWATER(0xff);
2191 tmp |= DONE_FIFO_HIWATER(0x4);
2192 }
2193 WREG32(SQ_MS_FIFO_SIZES, tmp);
2194
2195
2196
2197
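 /* SQ_CONFIG, SQ_GPR_RESOURCE_MGMT, SQ_THREAD_RESOURCE_MGMT, SQ_STACK_RESOURCE_MGMT
  * should be adjusted as needed by the 2D/3D drivers.  This just sets default values
  */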
2198 sq_config = RREG32(SQ_CONFIG);
2199 sq_config &= ~(PS_PRIO(3) |
2200 VS_PRIO(3) |
2201 GS_PRIO(3) |
2202 ES_PRIO(3));
2203 sq_config |= (DX9_CONSTS |
2204 VC_ENABLE |
2205 PS_PRIO(0) |
2206 VS_PRIO(1) |
2207 GS_PRIO(2) |
2208 ES_PRIO(3));
2209
2210 if ((rdev->family) == CHIP_R600) {
2211 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(124) |
2212 NUM_VS_GPRS(124) |
2213 NUM_CLAUSE_TEMP_GPRS(4));
2214 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(0) |
2215 NUM_ES_GPRS(0));
2216 sq_thread_resource_mgmt = (NUM_PS_THREADS(136) |
2217 NUM_VS_THREADS(48) |
2218 NUM_GS_THREADS(4) |
2219 NUM_ES_THREADS(4));
2220 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(128) |
2221 NUM_VS_STACK_ENTRIES(128));
2222 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(0) |
2223 NUM_ES_STACK_ENTRIES(0));
2224 } else if (((rdev->family) == CHIP_RV610) ||
2225 ((rdev->family) == CHIP_RV620) ||
2226 ((rdev->family) == CHIP_RS780) ||
2227 ((rdev->family) == CHIP_RS880)) {
2228
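 /* no vertex cache */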
2229 sq_config &= ~VC_ENABLE;
2230
2231 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
2232 NUM_VS_GPRS(44) |
2233 NUM_CLAUSE_TEMP_GPRS(2));
2234 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(17) |
2235 NUM_ES_GPRS(17));
2236 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
2237 NUM_VS_THREADS(78) |
2238 NUM_GS_THREADS(4) |
2239 NUM_ES_THREADS(31));
2240 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(40) |
2241 NUM_VS_STACK_ENTRIES(40));
2242 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(32) |
2243 NUM_ES_STACK_ENTRIES(16));
2244 } else if (((rdev->family) == CHIP_RV630) ||
2245 ((rdev->family) == CHIP_RV635)) {
2246 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
2247 NUM_VS_GPRS(44) |
2248 NUM_CLAUSE_TEMP_GPRS(2));
2249 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(18) |
2250 NUM_ES_GPRS(18));
2251 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
2252 NUM_VS_THREADS(78) |
2253 NUM_GS_THREADS(4) |
2254 NUM_ES_THREADS(31));
2255 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(40) |
2256 NUM_VS_STACK_ENTRIES(40));
2257 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(32) |
2258 NUM_ES_STACK_ENTRIES(16));
2259 } else if ((rdev->family) == CHIP_RV670) {
2260 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
2261 NUM_VS_GPRS(44) |
2262 NUM_CLAUSE_TEMP_GPRS(2));
2263 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(17) |
2264 NUM_ES_GPRS(17));
2265 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
2266 NUM_VS_THREADS(78) |
2267 NUM_GS_THREADS(4) |
2268 NUM_ES_THREADS(31));
2269 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(64) |
2270 NUM_VS_STACK_ENTRIES(64));
2271 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(64) |
2272 NUM_ES_STACK_ENTRIES(64));
2273 }
2274
2275 WREG32(SQ_CONFIG, sq_config);
2276 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
2277 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
2278 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
2279 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
2280 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
2281
2282 if (((rdev->family) == CHIP_RV610) ||
2283 ((rdev->family) == CHIP_RV620) ||
2284 ((rdev->family) == CHIP_RS780) ||
2285 ((rdev->family) == CHIP_RS880)) {
2286 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(TC_ONLY));
2287 } else {
2288 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(VC_AND_TC));
2289 }
2290
2291
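 /* More default values. 2D/3D driver should adjust as needed */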
2292 WREG32(PA_SC_AA_SAMPLE_LOCS_2S, (S0_X(0xc) | S0_Y(0x4) |
2293 S1_X(0x4) | S1_Y(0xc)));
2294 WREG32(PA_SC_AA_SAMPLE_LOCS_4S, (S0_X(0xe) | S0_Y(0xe) |
2295 S1_X(0x2) | S1_Y(0x2) |
2296 S2_X(0xa) | S2_Y(0x6) |
2297 S3_X(0x6) | S3_Y(0xa)));
2298 WREG32(PA_SC_AA_SAMPLE_LOCS_8S_WD0, (S0_X(0xe) | S0_Y(0xb) |
2299 S1_X(0x4) | S1_Y(0xc) |
2300 S2_X(0x1) | S2_Y(0x6) |
2301 S3_X(0xa) | S3_Y(0xe)));
2302 WREG32(PA_SC_AA_SAMPLE_LOCS_8S_WD1, (S4_X(0x6) | S4_Y(0x1) |
2303 S5_X(0x0) | S5_Y(0x0) |
2304 S6_X(0xb) | S6_Y(0x4) |
2305 S7_X(0x7) | S7_Y(0x8)));
2306
2307 WREG32(VGT_STRMOUT_EN, 0);
2308 tmp = rdev->config.r600.max_pipes * 16;
2309 switch (rdev->family) {
2310 case CHIP_RV610:
2311 case CHIP_RV620:
2312 case CHIP_RS780:
2313 case CHIP_RS880:
2314 tmp += 32;
2315 break;
2316 case CHIP_RV670:
2317 tmp += 128;
2318 break;
2319 default:
2320 break;
2321 }
2322 if (tmp > 256) {
2323 tmp = 256;
2324 }
2325 WREG32(VGT_ES_PER_GS, 128);
2326 WREG32(VGT_GS_PER_ES, tmp);
2327 WREG32(VGT_GS_PER_VS, 2);
2328 WREG32(VGT_GS_VERTEX_REUSE, 16);
2329
2330
2331 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
2332 WREG32(VGT_STRMOUT_EN, 0);
2333 WREG32(SX_MISC, 0);
2334 WREG32(PA_SC_MODE_CNTL, 0);
2335 WREG32(PA_SC_AA_CONFIG, 0);
2336 WREG32(PA_SC_LINE_STIPPLE, 0);
2337 WREG32(SPI_INPUT_Z, 0);
2338 WREG32(SPI_PS_IN_CONTROL_0, NUM_INTERP(2));
2339 WREG32(CB_COLOR7_FRAG, 0);
2340
2341
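 /* Clear render buffer base addresses */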
2342 WREG32(CB_COLOR0_BASE, 0);
2343 WREG32(CB_COLOR1_BASE, 0);
2344 WREG32(CB_COLOR2_BASE, 0);
2345 WREG32(CB_COLOR3_BASE, 0);
2346 WREG32(CB_COLOR4_BASE, 0);
2347 WREG32(CB_COLOR5_BASE, 0);
2348 WREG32(CB_COLOR6_BASE, 0);
2349 WREG32(CB_COLOR7_BASE, 0);
2350 WREG32(CB_COLOR7_FRAG, 0);
2351
2352 switch (rdev->family) {
2353 case CHIP_RV610:
2354 case CHIP_RV620:
2355 case CHIP_RS780:
2356 case CHIP_RS880:
2357 tmp = TC_L2_SIZE(8);
2358 break;
2359 case CHIP_RV630:
2360 case CHIP_RV635:
2361 tmp = TC_L2_SIZE(4);
2362 break;
2363 case CHIP_R600:
2364 tmp = TC_L2_SIZE(0) | L2_DISABLE_LATE_HIT;
2365 break;
2366 default:
2367 tmp = TC_L2_SIZE(0);
2368 break;
2369 }
2370 WREG32(TC_CNTL, tmp);
2371
2372 tmp = RREG32(HDP_HOST_PATH_CNTL);
2373 WREG32(HDP_HOST_PATH_CNTL, tmp);
2374
2375 tmp = RREG32(ARB_POP);
2376 tmp |= ENABLE_TC128;
2377 WREG32(ARB_POP, tmp);
2378
2379 WREG32(PA_SC_MULTI_CHIP_CNTL, 0);
2380 WREG32(PA_CL_ENHANCE, (CLIP_VTX_REORDER_ENA |
2381 NUM_CLIP_SEQ(3)));
2382 WREG32(PA_SC_ENHANCE, FORCE_EOV_MAX_CLK_CNT(4095));
2383 WREG32(VC_ENHANCE, 0);
2384}
2389
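/*
 * Indirect registers accessor
 */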
2390u32 r600_pciep_rreg(struct radeon_device *rdev, u32 reg)
2391{
2392 unsigned long flags;
2393 u32 r;
2394
2395 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
2396 WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
2397 (void)RREG32(PCIE_PORT_INDEX);
2398 r = RREG32(PCIE_PORT_DATA);
2399 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
2400 return r;
2401}
2402
2403void r600_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
2404{
2405 unsigned long flags;
2406
2407 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
2408 WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
2409 (void)RREG32(PCIE_PORT_INDEX);
2410 WREG32(PCIE_PORT_DATA, (v));
2411 (void)RREG32(PCIE_PORT_DATA);
2412 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
2413}
2417
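/*
 * CP & Ring
 */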
2418void r600_cp_stop(struct radeon_device *rdev)
2419{
2420 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
2421 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
2422 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
2423 WREG32(SCRATCH_UMSK, 0);
2424 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
2425}
2426
2427int r600_init_microcode(struct radeon_device *rdev)
2428{
2429 const char *chip_name;
2430 const char *rlc_chip_name;
2431 const char *smc_chip_name = "RV770";
2432 size_t pfp_req_size, me_req_size, rlc_req_size, smc_req_size = 0;
2433 char fw_name[30];
2434 int err;
2435
2436 DRM_DEBUG("\n");
2437
2438 switch (rdev->family) {
2439 case CHIP_R600:
2440 chip_name = "R600";
2441 rlc_chip_name = "R600";
2442 break;
2443 case CHIP_RV610:
2444 chip_name = "RV610";
2445 rlc_chip_name = "R600";
2446 break;
2447 case CHIP_RV630:
2448 chip_name = "RV630";
2449 rlc_chip_name = "R600";
2450 break;
2451 case CHIP_RV620:
2452 chip_name = "RV620";
2453 rlc_chip_name = "R600";
2454 break;
2455 case CHIP_RV635:
2456 chip_name = "RV635";
2457 rlc_chip_name = "R600";
2458 break;
2459 case CHIP_RV670:
2460 chip_name = "RV670";
2461 rlc_chip_name = "R600";
2462 break;
2463 case CHIP_RS780:
2464 case CHIP_RS880:
2465 chip_name = "RS780";
2466 rlc_chip_name = "R600";
2467 break;
2468 case CHIP_RV770:
2469 chip_name = "RV770";
2470 rlc_chip_name = "R700";
2471 smc_chip_name = "RV770";
2472 smc_req_size = ALIGN(RV770_SMC_UCODE_SIZE, 4);
2473 break;
2474 case CHIP_RV730:
2475 chip_name = "RV730";
2476 rlc_chip_name = "R700";
2477 smc_chip_name = "RV730";
2478 smc_req_size = ALIGN(RV730_SMC_UCODE_SIZE, 4);
2479 break;
2480 case CHIP_RV710:
2481 chip_name = "RV710";
2482 rlc_chip_name = "R700";
2483 smc_chip_name = "RV710";
2484 smc_req_size = ALIGN(RV710_SMC_UCODE_SIZE, 4);
2485 break;
2486 case CHIP_RV740:
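 /* RV740 uses the RV730 CP microcode */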
2487 chip_name = "RV730";
2488 rlc_chip_name = "R700";
2489 smc_chip_name = "RV740";
2490 smc_req_size = ALIGN(RV740_SMC_UCODE_SIZE, 4);
2491 break;
2492 case CHIP_CEDAR:
2493 chip_name = "CEDAR";
2494 rlc_chip_name = "CEDAR";
2495 smc_chip_name = "CEDAR";
2496 smc_req_size = ALIGN(CEDAR_SMC_UCODE_SIZE, 4);
2497 break;
2498 case CHIP_REDWOOD:
2499 chip_name = "REDWOOD";
2500 rlc_chip_name = "REDWOOD";
2501 smc_chip_name = "REDWOOD";
2502 smc_req_size = ALIGN(REDWOOD_SMC_UCODE_SIZE, 4);
2503 break;
2504 case CHIP_JUNIPER:
2505 chip_name = "JUNIPER";
2506 rlc_chip_name = "JUNIPER";
2507 smc_chip_name = "JUNIPER";
2508 smc_req_size = ALIGN(JUNIPER_SMC_UCODE_SIZE, 4);
2509 break;
2510 case CHIP_CYPRESS:
2511 case CHIP_HEMLOCK:
2512 chip_name = "CYPRESS";
2513 rlc_chip_name = "CYPRESS";
2514 smc_chip_name = "CYPRESS";
2515 smc_req_size = ALIGN(CYPRESS_SMC_UCODE_SIZE, 4);
2516 break;
2517 case CHIP_PALM:
2518 chip_name = "PALM";
2519 rlc_chip_name = "SUMO";
2520 break;
2521 case CHIP_SUMO:
2522 chip_name = "SUMO";
2523 rlc_chip_name = "SUMO";
2524 break;
2525 case CHIP_SUMO2:
2526 chip_name = "SUMO2";
2527 rlc_chip_name = "SUMO";
2528 break;
2529 default: BUG();
2530 }
2531
2532 if (rdev->family >= CHIP_CEDAR) {
2533 pfp_req_size = EVERGREEN_PFP_UCODE_SIZE * 4;
2534 me_req_size = EVERGREEN_PM4_UCODE_SIZE * 4;
2535 rlc_req_size = EVERGREEN_RLC_UCODE_SIZE * 4;
2536 } else if (rdev->family >= CHIP_RV770) {
2537 pfp_req_size = R700_PFP_UCODE_SIZE * 4;
2538 me_req_size = R700_PM4_UCODE_SIZE * 4;
2539 rlc_req_size = R700_RLC_UCODE_SIZE * 4;
2540 } else {
2541 pfp_req_size = R600_PFP_UCODE_SIZE * 4;
2542 me_req_size = R600_PM4_UCODE_SIZE * 12;
2543 rlc_req_size = R600_RLC_UCODE_SIZE * 4;
2544 }
2545
2546 DRM_INFO("Loading %s Microcode\n", chip_name);
2547
2548 snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
2549 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
2550 if (err)
2551 goto out;
2552 if (rdev->pfp_fw->size != pfp_req_size) {
2553 pr_err("r600_cp: Bogus length %zu in firmware \"%s\"\n",
2554 rdev->pfp_fw->size, fw_name);
2555 err = -EINVAL;
2556 goto out;
2557 }
2558
2559 snprintf(fw_name, sizeof(fw_name), "radeon/%s_me.bin", chip_name);
2560 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
2561 if (err)
2562 goto out;
2563 if (rdev->me_fw->size != me_req_size) {
2564 pr_err("r600_cp: Bogus length %zu in firmware \"%s\"\n",
2565 rdev->me_fw->size, fw_name);
2566 err = -EINVAL;
 goto out;
2567 }
2568
2569 snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
2570 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
2571 if (err)
2572 goto out;
2573 if (rdev->rlc_fw->size != rlc_req_size) {
2574 pr_err("r600_rlc: Bogus length %zu in firmware \"%s\"\n",
2575 rdev->rlc_fw->size, fw_name);
2576 err = -EINVAL;
 goto out;
2577 }
2578
2579 if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_HEMLOCK)) {
2580 snprintf(fw_name, sizeof(fw_name), "radeon/%s_smc.bin", smc_chip_name);
2581 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
2582 if (err) {
2583 pr_err("smc: error loading firmware \"%s\"\n", fw_name);
2584 release_firmware(rdev->smc_fw);
2585 rdev->smc_fw = NULL;
2586 err = 0;
2587 } else if (rdev->smc_fw->size != smc_req_size) {
2588 pr_err("smc: Bogus length %zu in firmware \"%s\"\n",
2589 rdev->smc_fw->size, fw_name);
2590 err = -EINVAL;
2591 }
2592 }
2593
2594out:
2595 if (err) {
2596 if (err != -EINVAL)
2597 pr_err("r600_cp: Failed to load firmware \"%s\"\n",
2598 fw_name);
2599 release_firmware(rdev->pfp_fw);
2600 rdev->pfp_fw = NULL;
2601 release_firmware(rdev->me_fw);
2602 rdev->me_fw = NULL;
2603 release_firmware(rdev->rlc_fw);
2604 rdev->rlc_fw = NULL;
2605 release_firmware(rdev->smc_fw);
2606 rdev->smc_fw = NULL;
2607 }
2608 return err;
2609}
2610
2611u32 r600_gfx_get_rptr(struct radeon_device *rdev,
2612 struct radeon_ring *ring)
2613{
2614 u32 rptr;
2615
2616 if (rdev->wb.enabled)
2617 rptr = rdev->wb.wb[ring->rptr_offs/4];
2618 else
2619 rptr = RREG32(R600_CP_RB_RPTR);
2620
2621 return rptr;
2622}
2623
2624u32 r600_gfx_get_wptr(struct radeon_device *rdev,
2625 struct radeon_ring *ring)
2626{
2627 return RREG32(R600_CP_RB_WPTR);
2628}
2629
2630void r600_gfx_set_wptr(struct radeon_device *rdev,
2631 struct radeon_ring *ring)
2632{
2633 WREG32(R600_CP_RB_WPTR, ring->wptr);
2634 (void)RREG32(R600_CP_RB_WPTR);
2635}
2636
2637static int r600_cp_load_microcode(struct radeon_device *rdev)
2638{
2639 const __be32 *fw_data;
2640 int i;
2641
2642 if (!rdev->me_fw || !rdev->pfp_fw)
2643 return -EINVAL;
2644
2645 r600_cp_stop(rdev);
2646
2647 WREG32(CP_RB_CNTL,
2648#ifdef __BIG_ENDIAN
2649 BUF_SWAP_32BIT |
2650#endif
2651 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2652
2653
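 /* Reset cp */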
2654 WREG32(GRBM_SOFT_RESET, SOFT_RESET_CP);
2655 RREG32(GRBM_SOFT_RESET);
2656 mdelay(15);
2657 WREG32(GRBM_SOFT_RESET, 0);
2658
2659 WREG32(CP_ME_RAM_WADDR, 0);
2660
2661 fw_data = (const __be32 *)rdev->me_fw->data;
2662 WREG32(CP_ME_RAM_WADDR, 0);
2663 for (i = 0; i < R600_PM4_UCODE_SIZE * 3; i++)
2664 WREG32(CP_ME_RAM_DATA,
2665 be32_to_cpup(fw_data++));
2666
2667 fw_data = (const __be32 *)rdev->pfp_fw->data;
2668 WREG32(CP_PFP_UCODE_ADDR, 0);
2669 for (i = 0; i < R600_PFP_UCODE_SIZE; i++)
2670 WREG32(CP_PFP_UCODE_DATA,
2671 be32_to_cpup(fw_data++));
2672
2673 WREG32(CP_PFP_UCODE_ADDR, 0);
2674 WREG32(CP_ME_RAM_WADDR, 0);
2675 WREG32(CP_ME_RAM_RADDR, 0);
2676 return 0;
2677}
2678
2679int r600_cp_start(struct radeon_device *rdev)
2680{
2681 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2682 int r;
2683 uint32_t cp_me;
2684
2685 r = radeon_ring_lock(rdev, ring, 7);
2686 if (r) {
2687 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2688 return r;
2689 }
2690 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2691 radeon_ring_write(ring, 0x1);
2692 if (rdev->family >= CHIP_RV770) {
2693 radeon_ring_write(ring, 0x0);
2694 radeon_ring_write(ring, rdev->config.rv770.max_hw_contexts - 1);
2695 } else {
2696 radeon_ring_write(ring, 0x3);
2697 radeon_ring_write(ring, rdev->config.r600.max_hw_contexts - 1);
2698 }
2699 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2700 radeon_ring_write(ring, 0);
2701 radeon_ring_write(ring, 0);
2702 radeon_ring_unlock_commit(rdev, ring, false);
2703
2704 cp_me = 0xff;
2705 WREG32(R_0086D8_CP_ME_CNTL, cp_me);
2706 return 0;
2707}
2708
2709int r600_cp_resume(struct radeon_device *rdev)
2710{
2711 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2712 u32 tmp;
2713 u32 rb_bufsz;
2714 int r;
2715
2716
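 /* Reset cp */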
2717 WREG32(GRBM_SOFT_RESET, SOFT_RESET_CP);
2718 RREG32(GRBM_SOFT_RESET);
2719 mdelay(15);
2720 WREG32(GRBM_SOFT_RESET, 0);
2721
2722
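 /* Set ring buffer size */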
2723 rb_bufsz = order_base_2(ring->ring_size / 8);
2724 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2725#ifdef __BIG_ENDIAN
2726 tmp |= BUF_SWAP_32BIT;
2727#endif
2728 WREG32(CP_RB_CNTL, tmp);
2729 WREG32(CP_SEM_WAIT_TIMER, 0x0);
2730
2731
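 /* Set the write pointer delay */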
2732 WREG32(CP_RB_WPTR_DELAY, 0);
2733
2734
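 /* Initialize the ring buffer's read and write pointers */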
2735 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2736 WREG32(CP_RB_RPTR_WR, 0);
2737 ring->wptr = 0;
2738 WREG32(CP_RB_WPTR, ring->wptr);
2739
2740
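 /* set the wb address whether it's enabled or not */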
2741 WREG32(CP_RB_RPTR_ADDR,
2742 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2743 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2744 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2745
2746 if (rdev->wb.enabled)
2747 WREG32(SCRATCH_UMSK, 0xff);
2748 else {
2749 tmp |= RB_NO_UPDATE;
2750 WREG32(SCRATCH_UMSK, 0);
2751 }
2752
2753 mdelay(1);
2754 WREG32(CP_RB_CNTL, tmp);
2755
2756 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2757 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2758
2759 r600_cp_start(rdev);
2760 ring->ready = true;
2761 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2762 if (r) {
2763 ring->ready = false;
2764 return r;
2765 }
2766
2767 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
2768 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
2769
2770 return 0;
2771}
2772
2773void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size)
2774{
2775 u32 rb_bufsz;
2776 int r;
2777
2778
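 /* Align ring size */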
2779 rb_bufsz = order_base_2(ring_size / 8);
2780 ring_size = (1 << (rb_bufsz + 1)) * 4;
2781 ring->ring_size = ring_size;
2782 ring->align_mask = 16 - 1;
2783
2784 if (radeon_ring_supports_scratch_reg(rdev, ring)) {
2785 r = radeon_scratch_get(rdev, &ring->rptr_save_reg);
2786 if (r) {
2787 DRM_ERROR("failed to get scratch reg for rptr save (%d).\n", r);
2788 ring->rptr_save_reg = 0;
2789 }
2790 }
2791}
2792
2793void r600_cp_fini(struct radeon_device *rdev)
2794{
2795 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2796 r600_cp_stop(rdev);
2797 radeon_ring_fini(rdev, ring);
2798 radeon_scratch_free(rdev, ring->rptr_save_reg);
2799}
2803
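/*
 * GPU scratch registers helpers function.
 */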
2804void r600_scratch_init(struct radeon_device *rdev)
2805{
2806 int i;
2807
2808 rdev->scratch.num_reg = 7;
2809 rdev->scratch.reg_base = SCRATCH_REG0;
2810 for (i = 0; i < rdev->scratch.num_reg; i++) {
2811 rdev->scratch.free[i] = true;
2812 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
2813 }
2814}
2815
2816int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
2817{
2818 uint32_t scratch;
2819 uint32_t tmp = 0;
2820 unsigned i;
2821 int r;
2822
2823 r = radeon_scratch_get(rdev, &scratch);
2824 if (r) {
2825 DRM_ERROR("radeon: cp failed to get scratch reg (%d).\n", r);
2826 return r;
2827 }
2828 WREG32(scratch, 0xCAFEDEAD);
2829 r = radeon_ring_lock(rdev, ring, 3);
2830 if (r) {
2831 DRM_ERROR("radeon: cp failed to lock ring %d (%d).\n", ring->idx, r);
2832 radeon_scratch_free(rdev, scratch);
2833 return r;
2834 }
2835 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2836 radeon_ring_write(ring, ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
2837 radeon_ring_write(ring, 0xDEADBEEF);
2838 radeon_ring_unlock_commit(rdev, ring, false);
2839 for (i = 0; i < rdev->usec_timeout; i++) {
2840 tmp = RREG32(scratch);
2841 if (tmp == 0xDEADBEEF)
2842 break;
2843 DRM_UDELAY(1);
2844 }
2845 if (i < rdev->usec_timeout) {
2846 DRM_INFO("ring test on %d succeeded in %d usecs\n", ring->idx, i);
2847 } else {
2848 DRM_ERROR("radeon: ring %d test failed (scratch(0x%04X)=0x%08X)\n",
2849 ring->idx, scratch, tmp);
2850 r = -EINVAL;
2851 }
2852 radeon_scratch_free(rdev, scratch);
2853 return r;
2854}
2855
2859
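/*
 * CP fences/semaphores
 */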
2860void r600_fence_ring_emit(struct radeon_device *rdev,
2861 struct radeon_fence *fence)
2862{
2863 struct radeon_ring *ring = &rdev->ring[fence->ring];
2864 u32 cp_coher_cntl = PACKET3_TC_ACTION_ENA | PACKET3_VC_ACTION_ENA |
2865 PACKET3_SH_ACTION_ENA;
2866
2867 if (rdev->family >= CHIP_RV770)
2868 cp_coher_cntl |= PACKET3_FULL_CACHE_ENA;
2869
2870 if (rdev->wb.use_event) {
2871 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
2872
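 /* flush read cache over gart */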
2873 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
2874 radeon_ring_write(ring, cp_coher_cntl);
2875 radeon_ring_write(ring, 0xFFFFFFFF);
2876 radeon_ring_write(ring, 0);
2877 radeon_ring_write(ring, 10); /* poll interval */
2878
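 /* EVENT_WRITE_EOP - flush caches, send int */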
2879 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
2880 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT_TS) | EVENT_INDEX(5));
2881 radeon_ring_write(ring, lower_32_bits(addr));
2882 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
2883 radeon_ring_write(ring, fence->seq);
2884 radeon_ring_write(ring, 0);
2885 } else {
2886
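 /* flush read cache over gart */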
2887 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
2888 radeon_ring_write(ring, cp_coher_cntl);
2889 radeon_ring_write(ring, 0xFFFFFFFF);
2890 radeon_ring_write(ring, 0);
2891 radeon_ring_write(ring, 10); /* poll interval */
2892 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
2893 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT) | EVENT_INDEX(0));
2894
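 /* wait for 3D idle clean */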
2895 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2896 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
2897 radeon_ring_write(ring, WAIT_3D_IDLE_bit | WAIT_3D_IDLECLEAN_bit);
2898
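 /* Emit fence sequence & fire IRQ */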
2899 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2900 radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
2901 radeon_ring_write(ring, fence->seq);
2902
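 /* CP_INTERRUPT packet 3 no longer exists, use packet 0 */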
2903 radeon_ring_write(ring, PACKET0(CP_INT_STATUS, 0));
2904 radeon_ring_write(ring, RB_INT_STAT);
2905 }
2906}
2918
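/**
 * r600_semaphore_ring_emit - emit a semaphore on the CP ring
 *
 * @rdev: radeon_device pointer
 * @ring: radeon ring buffer object
 * @semaphore: radeon semaphore object
 * @emit_wait: Is this a semaphore wait?
 *
 * Emits a semaphore signal/wait packet to the CP ring and prevents the PFP
 * from running ahead of semaphore waits.
 */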
2919bool r600_semaphore_ring_emit(struct radeon_device *rdev,
2920 struct radeon_ring *ring,
2921 struct radeon_semaphore *semaphore,
2922 bool emit_wait)
2923{
2924 uint64_t addr = semaphore->gpu_addr;
2925 unsigned sel = emit_wait ? PACKET3_SEM_SEL_WAIT : PACKET3_SEM_SEL_SIGNAL;
2926
2927 if (rdev->family < CHIP_CAYMAN)
2928 sel |= PACKET3_SEM_WAIT_ON_SIGNAL;
2929
2930 radeon_ring_write(ring, PACKET3(PACKET3_MEM_SEMAPHORE, 1));
2931 radeon_ring_write(ring, lower_32_bits(addr));
2932 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | sel);
2933
2934
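 /* PFP_SYNC_ME packet only exists on 7xx+, only enable it on eg+ */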
2935 if (emit_wait && (rdev->family >= CHIP_CEDAR)) {
2936
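 /* Prevent the PFP from running ahead of the semaphore wait */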
2937 radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
2938 radeon_ring_write(ring, 0x0);
2939 }
2940
2941 return true;
2942}
2956
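/**
 * r600_copy_cpdma - copy pages using the CP DMA engine
 *
 * @rdev: radeon_device pointer
 * @src_offset: src GPU address
 * @dst_offset: dst GPU address
 * @num_gpu_pages: number of GPU pages to xfer
 * @resv: reservation object with embedded fence
 *
 * Copy GPU paging using the CP DMA engine (r6xx+).
 * Used by the radeon ttm implementation to move pages if
 * registered as the asic copy callback.
 */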
2957struct radeon_fence *r600_copy_cpdma(struct radeon_device *rdev,
2958 uint64_t src_offset, uint64_t dst_offset,
2959 unsigned num_gpu_pages,
2960 struct reservation_object *resv)
2961{
2962 struct radeon_fence *fence;
2963 struct radeon_sync sync;
2964 int ring_index = rdev->asic->copy.blit_ring_index;
2965 struct radeon_ring *ring = &rdev->ring[ring_index];
2966 u32 size_in_bytes, cur_size_in_bytes, tmp;
2967 int i, num_loops;
2968 int r = 0;
2969
2970 radeon_sync_create(&sync);
2971
2972 size_in_bytes = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT);
2973 num_loops = DIV_ROUND_UP(size_in_bytes, 0x1fffff);
2974 r = radeon_ring_lock(rdev, ring, num_loops * 6 + 24);
2975 if (r) {
2976 DRM_ERROR("radeon: moving bo (%d).\n", r);
2977 radeon_sync_free(rdev, &sync, NULL);
2978 return ERR_PTR(r);
2979 }
2980
2981 radeon_sync_resv(rdev, &sync, resv, false);
2982 radeon_sync_rings(rdev, &sync, ring->idx);
2983
2984 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2985 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
2986 radeon_ring_write(ring, WAIT_3D_IDLE_bit);
2987 for (i = 0; i < num_loops; i++) {
2988 cur_size_in_bytes = size_in_bytes;
2989 if (cur_size_in_bytes > 0x1fffff)
2990 cur_size_in_bytes = 0x1fffff;
2991 size_in_bytes -= cur_size_in_bytes;
2992 tmp = upper_32_bits(src_offset) & 0xff;
2993 if (size_in_bytes == 0)
2994 tmp |= PACKET3_CP_DMA_CP_SYNC;
2995 radeon_ring_write(ring, PACKET3(PACKET3_CP_DMA, 4));
2996 radeon_ring_write(ring, lower_32_bits(src_offset));
2997 radeon_ring_write(ring, tmp);
2998 radeon_ring_write(ring, lower_32_bits(dst_offset));
2999 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
3000 radeon_ring_write(ring, cur_size_in_bytes);
3001 src_offset += cur_size_in_bytes;
3002 dst_offset += cur_size_in_bytes;
3003 }
3004 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3005 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
3006 radeon_ring_write(ring, WAIT_CP_DMA_IDLE_bit);
3007
3008 r = radeon_fence_emit(rdev, &fence, ring->idx);
3009 if (r) {
3010 radeon_ring_unlock_undo(rdev, ring);
3011 radeon_sync_free(rdev, &sync, NULL);
3012 return ERR_PTR(r);
3013 }
3014
3015 radeon_ring_unlock_commit(rdev, ring, false);
3016 radeon_sync_free(rdev, &sync, fence);
3017
3018 return fence;
3019}
3020
3021int r600_set_surface_reg(struct radeon_device *rdev, int reg,
3022 uint32_t tiling_flags, uint32_t pitch,
3023 uint32_t offset, uint32_t obj_size)
3024{
3025
3026 return 0;
3027}
3028
3029void r600_clear_surface_reg(struct radeon_device *rdev, int reg)
3030{
3031
3032}
3033
3034static void r600_uvd_init(struct radeon_device *rdev)
3035{
3036 int r;
3037
3038 if (!rdev->has_uvd)
3039 return;
3040
3041 r = radeon_uvd_init(rdev);
3042 if (r) {
3043 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
3049
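 /*
  * At this point rdev->uvd.vcpu_bo is NULL which trickles down
  * to early fails r600_uvd_start() and thus nothing happens
  * there. So it is pointless to try to go through that code
  * path, but apart from that, we don't really care. Continue
  * and do the rest of the setup, and don't fail.
  */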
3050 rdev->has_uvd = 0;
3051 return;
3052 }
3053 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
3054 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
3055}
3056
3057static void r600_uvd_start(struct radeon_device *rdev)
3058{
3059 int r;
3060
3061 if (!rdev->has_uvd)
3062 return;
3063
3064 r = uvd_v1_0_resume(rdev);
3065 if (r) {
3066 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
3067 goto error;
3068 }
3069 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
3070 if (r) {
3071 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
3072 goto error;
3073 }
3074 return;
3075
3076error:
3077 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
3078}
3079
3080static void r600_uvd_resume(struct radeon_device *rdev)
3081{
3082 struct radeon_ring *ring;
3083 int r;
3084
3085 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
3086 return;
3087
3088 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
3089 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
3090 if (r) {
3091 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
3092 return;
3093 }
3094 r = uvd_v1_0_init(rdev);
3095 if (r) {
3096 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
3097 return;
3098 }
3099}
3100
3101static int r600_startup(struct radeon_device *rdev)
3102{
3103 struct radeon_ring *ring;
3104 int r;
3105
3106
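 /* enable pcie gen2 link */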
3107 r600_pcie_gen2_enable(rdev);
3108
3109
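 /* scratch needs to be initialized before MC */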
3110 r = r600_vram_scratch_init(rdev);
3111 if (r)
3112 return r;
3113
3114 r600_mc_program(rdev);
3115
3116 if (rdev->flags & RADEON_IS_AGP) {
3117 r600_agp_enable(rdev);
3118 } else {
3119 r = r600_pcie_gart_enable(rdev);
3120 if (r)
3121 return r;
3122 }
3123 r600_gpu_init(rdev);
3124
3125
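 /* allocate wb buffer */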
3126 r = radeon_wb_init(rdev);
3127 if (r)
3128 return r;
3129
3130 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
3131 if (r) {
3132 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
3133 return r;
3134 }
3135
3136 r600_uvd_start(rdev);
3137
3138
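 /* Enable IRQ */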
3139 if (!rdev->irq.installed) {
3140 r = radeon_irq_kms_init(rdev);
3141 if (r)
3142 return r;
3143 }
3144
3145 r = r600_irq_init(rdev);
3146 if (r) {
3147 DRM_ERROR("radeon: IH init failed (%d).\n", r);
3148 radeon_irq_kms_fini(rdev);
3149 return r;
3150 }
3151 r600_irq_set(rdev);
3152
3153 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3154 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
3155 RADEON_CP_PACKET2);
3156 if (r)
3157 return r;
3158
3159 r = r600_cp_load_microcode(rdev);
3160 if (r)
3161 return r;
3162 r = r600_cp_resume(rdev);
3163 if (r)
3164 return r;
3165
3166 r600_uvd_resume(rdev);
3167
3168 r = radeon_ib_pool_init(rdev);
3169 if (r) {
3170 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
3171 return r;
3172 }
3173
3174 r = radeon_audio_init(rdev);
3175 if (r) {
3176 DRM_ERROR("radeon: audio init failed\n");
3177 return r;
3178 }
3179
3180 return 0;
3181}
3182
3183void r600_vga_set_state(struct radeon_device *rdev, bool state)
3184{
3185 uint32_t temp;
3186
3187 temp = RREG32(CONFIG_CNTL);
3188 if (state == false) {
3189 temp &= ~(1<<0);
3190 temp |= (1<<1);
3191 } else {
3192 temp &= ~(1<<1);
3193 }
3194 WREG32(CONFIG_CNTL, temp);
3195}
3196
3197int r600_resume(struct radeon_device *rdev)
3198{
3199 int r;
3205
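 /* Do not reset GPU before posting, on r600 hw unlike on r500 hw,
  * posting will perform necessary task to bring back GPU into good
  * shape.
  */
 /* post card */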
3206 atom_asic_init(rdev->mode_info.atom_context);
3207
3208 if (rdev->pm.pm_method == PM_METHOD_DPM)
3209 radeon_pm_resume(rdev);
3210
3211 rdev->accel_working = true;
3212 r = r600_startup(rdev);
3213 if (r) {
3214 DRM_ERROR("r600 startup failed on resume\n");
3215 rdev->accel_working = false;
3216 return r;
3217 }
3218
3219 return r;
3220}
3221
3222int r600_suspend(struct radeon_device *rdev)
3223{
3224 radeon_pm_suspend(rdev);
3225 radeon_audio_fini(rdev);
3226 r600_cp_stop(rdev);
3227 if (rdev->has_uvd) {
3228 uvd_v1_0_fini(rdev);
3229 radeon_uvd_suspend(rdev);
3230 }
3231 r600_irq_suspend(rdev);
3232 radeon_wb_disable(rdev);
3233 r600_pcie_gart_disable(rdev);
3234
3235 return 0;
3236}
3243
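/* Plan is to move initialization in that function and use
 * a helper function so that radeon_device_init pretty much
 * does nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback functions
 * like vram_info.
 */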
3244int r600_init(struct radeon_device *rdev)
3245{
3246 int r;
3247
3248 if (r600_debugfs_mc_info_init(rdev)) {
3249 DRM_ERROR("Failed to register debugfs file for mc!\n");
3250 }
3251
3252 if (!radeon_get_bios(rdev)) {
3253 if (ASIC_IS_AVIVO(rdev))
3254 return -EINVAL;
3255 }
3256
3257 if (!rdev->is_atom_bios) {
3258 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
3259 return -EINVAL;
3260 }
3261 r = radeon_atombios_init(rdev);
3262 if (r)
3263 return r;
3264
3265 if (!radeon_card_posted(rdev)) {
3266 if (!rdev->bios) {
3267 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
3268 return -EINVAL;
3269 }
3270 DRM_INFO("GPU not posted. posting now...\n");
3271 atom_asic_init(rdev->mode_info.atom_context);
3272 }
3273
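 /* Initialize scratch registers */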
3274 r600_scratch_init(rdev);
3275
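 /* Initialize surface registers */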
3276 radeon_surface_init(rdev);
3277
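 /* Initialize clocks */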
3278 radeon_get_clock_info(rdev->ddev);
3279
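 /* Fence driver */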
3280 r = radeon_fence_driver_init(rdev);
3281 if (r)
3282 return r;
3283 if (rdev->flags & RADEON_IS_AGP) {
3284 r = radeon_agp_init(rdev);
3285 if (r)
3286 radeon_agp_disable(rdev);
3287 }
3288 r = r600_mc_init(rdev);
3289 if (r)
3290 return r;
3291
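 /* Memory manager */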
3292 r = radeon_bo_init(rdev);
3293 if (r)
3294 return r;
3295
3296 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
3297 r = r600_init_microcode(rdev);
3298 if (r) {
3299 DRM_ERROR("Failed to load firmware!\n");
3300 return r;
3301 }
3302 }
3303
3304
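 /* Initialize power management */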
3305 radeon_pm_init(rdev);
3306
3307 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
3308 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
3309
3310 r600_uvd_init(rdev);
3311
3312 rdev->ih.ring_obj = NULL;
3313 r600_ih_ring_init(rdev, 64 * 1024);
3314
3315 r = r600_pcie_gart_init(rdev);
3316 if (r)
3317 return r;
3318
3319 rdev->accel_working = true;
3320 r = r600_startup(rdev);
3321 if (r) {
3322 dev_err(rdev->dev, "disabling GPU acceleration\n");
3323 r600_cp_fini(rdev);
3324 r600_irq_fini(rdev);
3325 radeon_wb_fini(rdev);
3326 radeon_ib_pool_fini(rdev);
3327 radeon_irq_kms_fini(rdev);
3328 r600_pcie_gart_fini(rdev);
3329 rdev->accel_working = false;
3330 }
3331
3332 return 0;
3333}
3334
3335void r600_fini(struct radeon_device *rdev)
3336{
3337 radeon_pm_fini(rdev);
3338 radeon_audio_fini(rdev);
3339 r600_cp_fini(rdev);
3340 r600_irq_fini(rdev);
3341 if (rdev->has_uvd) {
3342 uvd_v1_0_fini(rdev);
3343 radeon_uvd_fini(rdev);
3344 }
3345 radeon_wb_fini(rdev);
3346 radeon_ib_pool_fini(rdev);
3347 radeon_irq_kms_fini(rdev);
3348 r600_pcie_gart_fini(rdev);
3349 r600_vram_scratch_fini(rdev);
3350 radeon_agp_fini(rdev);
3351 radeon_gem_fini(rdev);
3352 radeon_fence_driver_fini(rdev);
3353 radeon_bo_fini(rdev);
3354 radeon_atombios_fini(rdev);
3355 kfree(rdev->bios);
3356 rdev->bios = NULL;
3357}
3358
3362
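/*
 * CS stuff
 */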
3363void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3364{
3365 struct radeon_ring *ring = &rdev->ring[ib->ring];
3366 u32 next_rptr;
3367
3368 if (ring->rptr_save_reg) {
3369 next_rptr = ring->wptr + 3 + 4;
3370 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3371 radeon_ring_write(ring, ((ring->rptr_save_reg -
3372 PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
3373 radeon_ring_write(ring, next_rptr);
3374 } else if (rdev->wb.enabled) {
3375 next_rptr = ring->wptr + 5 + 4;
3376 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3377 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3378 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3379 radeon_ring_write(ring, next_rptr);
3380 radeon_ring_write(ring, 0);
3381 }
3382
3383 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3384 radeon_ring_write(ring,
3385#ifdef __BIG_ENDIAN
3386 (2 << 0) |
3387#endif
3388 (ib->gpu_addr & 0xFFFFFFFC));
3389 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3390 radeon_ring_write(ring, ib->length_dw);
3391}
3392
3393int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
3394{
3395 struct radeon_ib ib;
3396 uint32_t scratch;
3397 uint32_t tmp = 0;
3398 unsigned i;
3399 int r;
3400
3401 r = radeon_scratch_get(rdev, &scratch);
3402 if (r) {
3403 DRM_ERROR("radeon: failed to get scratch reg (%d).\n", r);
3404 return r;
3405 }
3406 WREG32(scratch, 0xCAFEDEAD);
3407 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
3408 if (r) {
3409 DRM_ERROR("radeon: failed to get ib (%d).\n", r);
3410 goto free_scratch;
3411 }
3412 ib.ptr[0] = PACKET3(PACKET3_SET_CONFIG_REG, 1);
3413 ib.ptr[1] = ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
3414 ib.ptr[2] = 0xDEADBEEF;
3415 ib.length_dw = 3;
3416 r = radeon_ib_schedule(rdev, &ib, NULL, false);
3417 if (r) {
3418 DRM_ERROR("radeon: failed to schedule ib (%d).\n", r);
3419 goto free_ib;
3420 }
3421 r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(
3422 RADEON_USEC_IB_TEST_TIMEOUT));
3423 if (r < 0) {
3424 DRM_ERROR("radeon: fence wait failed (%d).\n", r);
3425 goto free_ib;
3426 } else if (r == 0) {
3427 DRM_ERROR("radeon: fence wait timed out.\n");
3428 r = -ETIMEDOUT;
3429 goto free_ib;
3430 }
3431 r = 0;
3432 for (i = 0; i < rdev->usec_timeout; i++) {
3433 tmp = RREG32(scratch);
3434 if (tmp == 0xDEADBEEF)
3435 break;
3436 DRM_UDELAY(1);
3437 }
3438 if (i < rdev->usec_timeout) {
3439 DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
3440 } else {
3441 DRM_ERROR("radeon: ib test failed (scratch(0x%04X)=0x%08X)\n",
3442 scratch, tmp);
3443 r = -EINVAL;
3444 }
3445free_ib:
3446 radeon_ib_free(rdev, &ib);
3447free_scratch:
3448 radeon_scratch_free(rdev, scratch);
3449 return r;
3450}
3462
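/*
 * Interrupts
 *
 * Interrupts use a ring buffer on r6xx/r7xx hardware.  It works pretty
 * much the same as the CP ring buffer, but in reverse: the GPU writes
 * interrupt vectors to the ring and advances the wptr, and the host
 * consumes them and advances the rptr.  Comparing rptr and wptr shows
 * whether new interrupts are pending.
 */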
3463void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size)
3464{
3465 u32 rb_bufsz;
3466
3467
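 /* Align ring size */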
3468 rb_bufsz = order_base_2(ring_size / 4);
3469 ring_size = (1 << rb_bufsz) * 4;
3470 rdev->ih.ring_size = ring_size;
3471 rdev->ih.ptr_mask = rdev->ih.ring_size - 1;
3472 rdev->ih.rptr = 0;
3473}
3474
3475int r600_ih_ring_alloc(struct radeon_device *rdev)
3476{
3477 int r;
3478
3479
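 /* Allocate ring buffer */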
3480 if (rdev->ih.ring_obj == NULL) {
3481 r = radeon_bo_create(rdev, rdev->ih.ring_size,
3482 PAGE_SIZE, true,
3483 RADEON_GEM_DOMAIN_GTT, 0,
3484 NULL, NULL, &rdev->ih.ring_obj);
3485 if (r) {
3486 DRM_ERROR("radeon: failed to create ih ring buffer (%d).\n", r);
3487 return r;
3488 }
3489 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
3490 if (unlikely(r != 0))
3491 return r;
3492 r = radeon_bo_pin(rdev->ih.ring_obj,
3493 RADEON_GEM_DOMAIN_GTT,
3494 &rdev->ih.gpu_addr);
3495 if (r) {
3496 radeon_bo_unreserve(rdev->ih.ring_obj);
3497 DRM_ERROR("radeon: failed to pin ih ring buffer (%d).\n", r);
3498 return r;
3499 }
3500 r = radeon_bo_kmap(rdev->ih.ring_obj,
3501 (void **)&rdev->ih.ring);
3502 radeon_bo_unreserve(rdev->ih.ring_obj);
3503 if (r) {
3504 DRM_ERROR("radeon: failed to map ih ring buffer (%d).\n", r);
3505 return r;
3506 }
3507 }
3508 return 0;
3509}
3510
3511void r600_ih_ring_fini(struct radeon_device *rdev)
3512{
3513 int r;
3514 if (rdev->ih.ring_obj) {
3515 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
3516 if (likely(r == 0)) {
3517 radeon_bo_kunmap(rdev->ih.ring_obj);
3518 radeon_bo_unpin(rdev->ih.ring_obj);
3519 radeon_bo_unreserve(rdev->ih.ring_obj);
3520 }
3521 radeon_bo_unref(&rdev->ih.ring_obj);
3522 rdev->ih.ring = NULL;
3523 rdev->ih.ring_obj = NULL;
3524 }
3525}
3526
3527void r600_rlc_stop(struct radeon_device *rdev)
3528{
3529
3530 if ((rdev->family >= CHIP_RV770) &&
3531 (rdev->family <= CHIP_RV740)) {
3532
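 /* r7xx asics need to soft reset RLC before halting */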
3533 WREG32(SRBM_SOFT_RESET, SOFT_RESET_RLC);
3534 RREG32(SRBM_SOFT_RESET);
3535 mdelay(15);
3536 WREG32(SRBM_SOFT_RESET, 0);
3537 RREG32(SRBM_SOFT_RESET);
3538 }
3539
3540 WREG32(RLC_CNTL, 0);
3541}
3542
3543static void r600_rlc_start(struct radeon_device *rdev)
3544{
3545 WREG32(RLC_CNTL, RLC_ENABLE);
3546}
3547
3548static int r600_rlc_resume(struct radeon_device *rdev)
3549{
3550 u32 i;
3551 const __be32 *fw_data;
3552
3553 if (!rdev->rlc_fw)
3554 return -EINVAL;
3555
3556 r600_rlc_stop(rdev);
3557
3558 WREG32(RLC_HB_CNTL, 0);
3559
3560 WREG32(RLC_HB_BASE, 0);
3561 WREG32(RLC_HB_RPTR, 0);
3562 WREG32(RLC_HB_WPTR, 0);
3563 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
3564 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
3565 WREG32(RLC_MC_CNTL, 0);
3566 WREG32(RLC_UCODE_CNTL, 0);
3567
3568 fw_data = (const __be32 *)rdev->rlc_fw->data;
3569 if (rdev->family >= CHIP_RV770) {
3570 for (i = 0; i < R700_RLC_UCODE_SIZE; i++) {
3571 WREG32(RLC_UCODE_ADDR, i);
3572 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
3573 }
3574 } else {
3575 for (i = 0; i < R600_RLC_UCODE_SIZE; i++) {
3576 WREG32(RLC_UCODE_ADDR, i);
3577 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
3578 }
3579 }
3580 WREG32(RLC_UCODE_ADDR, 0);
3581
3582 r600_rlc_start(rdev);
3583
3584 return 0;
3585}
3586
3587static void r600_enable_interrupts(struct radeon_device *rdev)
3588{
3589 u32 ih_cntl = RREG32(IH_CNTL);
3590 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
3591
3592 ih_cntl |= ENABLE_INTR;
3593 ih_rb_cntl |= IH_RB_ENABLE;
3594 WREG32(IH_CNTL, ih_cntl);
3595 WREG32(IH_RB_CNTL, ih_rb_cntl);
3596 rdev->ih.enabled = true;
3597}
3598
3599void r600_disable_interrupts(struct radeon_device *rdev)
3600{
3601 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
3602 u32 ih_cntl = RREG32(IH_CNTL);
3603
3604 ih_rb_cntl &= ~IH_RB_ENABLE;
3605 ih_cntl &= ~ENABLE_INTR;
3606 WREG32(IH_RB_CNTL, ih_rb_cntl);
3607 WREG32(IH_CNTL, ih_cntl);
3608
3609 WREG32(IH_RB_RPTR, 0);
3610 WREG32(IH_RB_WPTR, 0);
3611 rdev->ih.enabled = false;
3612 rdev->ih.rptr = 0;
3613}
3614
3615static void r600_disable_interrupt_state(struct radeon_device *rdev)
3616{
3617 u32 tmp;
3618
3619 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3620 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3621 WREG32(DMA_CNTL, tmp);
3622 WREG32(GRBM_INT_CNTL, 0);
3623 WREG32(DxMODE_INT_MASK, 0);
3624 WREG32(D1GRPH_INTERRUPT_CONTROL, 0);
3625 WREG32(D2GRPH_INTERRUPT_CONTROL, 0);
3626 if (ASIC_IS_DCE3(rdev)) {
3627 WREG32(DCE3_DACA_AUTODETECT_INT_CONTROL, 0);
3628 WREG32(DCE3_DACB_AUTODETECT_INT_CONTROL, 0);
3629 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3630 WREG32(DC_HPD1_INT_CONTROL, tmp);
3631 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3632 WREG32(DC_HPD2_INT_CONTROL, tmp);
3633 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3634 WREG32(DC_HPD3_INT_CONTROL, tmp);
3635 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3636 WREG32(DC_HPD4_INT_CONTROL, tmp);
3637 if (ASIC_IS_DCE32(rdev)) {
3638 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3639 WREG32(DC_HPD5_INT_CONTROL, tmp);
3640 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3641 WREG32(DC_HPD6_INT_CONTROL, tmp);
3642 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3643 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, tmp);
3644 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3645 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, tmp);
3646 } else {
3647 tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3648 WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
3649 tmp = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3650 WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, tmp);
3651 }
3652 } else {
3653 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
3654 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
3655 tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3656 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
3657 tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3658 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
3659 tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3660 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
3661 tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3662 WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
3663 tmp = RREG32(HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3664 WREG32(HDMI1_AUDIO_PACKET_CONTROL, tmp);
3665 }
3666}
3667
3668int r600_irq_init(struct radeon_device *rdev)
3669{
3670 int ret = 0;
3671 int rb_bufsz;
3672 u32 interrupt_cntl, ih_cntl, ih_rb_cntl;
3673
3674
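 /* allocate ring */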
3675 ret = r600_ih_ring_alloc(rdev);
3676 if (ret)
3677 return ret;
3678
3679
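 /* disable irqs */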
3680 r600_disable_interrupts(rdev);
3681
3682
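 /* init rlc */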
3683 if (rdev->family >= CHIP_CEDAR)
3684 ret = evergreen_rlc_resume(rdev);
3685 else
3686 ret = r600_rlc_resume(rdev);
3687 if (ret) {
3688 r600_ih_ring_fini(rdev);
3689 return ret;
3690 }
3691
3692
3693
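 /* setup interrupt control */
 /* set dummy read address to ring address */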
3694 WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8);
3695 interrupt_cntl = RREG32(INTERRUPT_CNTL);
3696
3697
3698
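 /* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
  * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
  */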
3699 interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
3700
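 /* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */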
3701 interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
3702 WREG32(INTERRUPT_CNTL, interrupt_cntl);
3703
3704 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
3705 rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
3706
3707 ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
3708 IH_WPTR_OVERFLOW_CLEAR |
3709 (rb_bufsz << 1));
3710
3711 if (rdev->wb.enabled)
3712 ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;
3713
3714
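 /* set the writeback address whether it's enabled or not */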
3715 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
3716 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
3717
3718 WREG32(IH_RB_CNTL, ih_rb_cntl);
3719
3720
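 /* set rptr, wptr to 0 */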
3721 WREG32(IH_RB_RPTR, 0);
3722 WREG32(IH_RB_WPTR, 0);
3723
3724
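 /* Default settings for IH_CNTL (disabled at first) */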
3725 ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10);
3726
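 /* RPTR_REARM only works if msi's are enabled */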
3727 if (rdev->msi_enabled)
3728 ih_cntl |= RPTR_REARM;
3729 WREG32(IH_CNTL, ih_cntl);
3730
3731
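 /* force the active interrupt state to all disabled */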
3732 if (rdev->family >= CHIP_CEDAR)
3733 evergreen_disable_interrupt_state(rdev);
3734 else
3735 r600_disable_interrupt_state(rdev);
3736
3737
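 /* at this point everything should be setup correctly to enable master */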
3738 pci_set_master(rdev->pdev);
3739
3740
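 /* enable irqs */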
3741 r600_enable_interrupts(rdev);
3742
3743 return ret;
3744}
3745
3746void r600_irq_suspend(struct radeon_device *rdev)
3747{
3748 r600_irq_disable(rdev);
3749 r600_rlc_stop(rdev);
3750}
3751
3752void r600_irq_fini(struct radeon_device *rdev)
3753{
3754 r600_irq_suspend(rdev);
3755 r600_ih_ring_fini(rdev);
3756}
3757
3758int r600_irq_set(struct radeon_device *rdev)
3759{
3760 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
3761 u32 mode_int = 0;
3762 u32 hpd1, hpd2, hpd3, hpd4 = 0, hpd5 = 0, hpd6 = 0;
3763 u32 grbm_int_cntl = 0;
3764 u32 hdmi0, hdmi1;
3765 u32 dma_cntl;
3766 u32 thermal_int = 0;
3767
3768 if (!rdev->irq.installed) {
3769 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
3770 return -EINVAL;
3771 }
3772
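 /* don't enable anything if the ih is disabled */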
3773 if (!rdev->ih.enabled) {
3774 r600_disable_interrupts(rdev);
3775
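 /* force the active interrupt state to all disabled */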
3776 r600_disable_interrupt_state(rdev);
3777 return 0;
3778 }
3779
3780 if (ASIC_IS_DCE3(rdev)) {
3781 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
3782 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
3783 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
3784 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
3785 if (ASIC_IS_DCE32(rdev)) {
3786 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
3787 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
3788 hdmi0 = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3789 hdmi1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3790 } else {
3791 hdmi0 = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3792 hdmi1 = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3793 }
3794 } else {
3795 hpd1 = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & ~DC_HPDx_INT_EN;
3796 hpd2 = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & ~DC_HPDx_INT_EN;
3797 hpd3 = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & ~DC_HPDx_INT_EN;
3798 hdmi0 = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3799 hdmi1 = RREG32(HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3800 }

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) {
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	} else if (rdev->family >= CHIP_RV770) {
		thermal_int = RREG32(RV770_CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	}
	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int\n");
		cp_int_cntl |= RB_INT_ENABLE;
		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("r600_irq_set: vblank 0\n");
		mode_int |= D1MODE_VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("r600_irq_set: vblank 1\n");
		mode_int |= D2MODE_VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("r600_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("r600_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("r600_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("r600_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("r600_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("r600_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("r600_irq_set: hdmi 0\n");
		hdmi0 |= HDMI0_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("r600_irq_set: hdmi 1\n");
		hdmi1 |= HDMI0_AZ_FORMAT_WTRIG_MASK;
	}

	WREG32(CP_INT_CNTL, cp_int_cntl);
	WREG32(DMA_CNTL, dma_cntl);
	WREG32(DxMODE_INT_MASK, mode_int);
	WREG32(D1GRPH_INTERRUPT_CONTROL, DxGRPH_PFLIP_INT_MASK);
	WREG32(D2GRPH_INTERRUPT_CONTROL, DxGRPH_PFLIP_INT_MASK);
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
	if (ASIC_IS_DCE3(rdev)) {
		WREG32(DC_HPD1_INT_CONTROL, hpd1);
		WREG32(DC_HPD2_INT_CONTROL, hpd2);
		WREG32(DC_HPD3_INT_CONTROL, hpd3);
		WREG32(DC_HPD4_INT_CONTROL, hpd4);
		if (ASIC_IS_DCE32(rdev)) {
			WREG32(DC_HPD5_INT_CONTROL, hpd5);
			WREG32(DC_HPD6_INT_CONTROL, hpd6);
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, hdmi0);
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, hdmi1);
		} else {
			WREG32(HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
			WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, hdmi1);
		}
	} else {
		WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, hpd1);
		WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, hpd2);
		WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, hpd3);
		WREG32(HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
		WREG32(HDMI1_AUDIO_PACKET_CONTROL, hdmi1);
	}
	if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) {
		WREG32(CG_THERMAL_INT, thermal_int);
	} else if (rdev->family >= CHIP_RV770) {
		WREG32(RV770_CG_THERMAL_INT, thermal_int);
	}
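
	/* posting read */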
	RREG32(R_000E50_SRBM_STATUS);

	return 0;
}
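
/*
 * r600_irq_ack - latch and acknowledge pending display interrupts
 *
 * Snapshot the display/HPD/HDMI interrupt status registers into
 * rdev->irq.stat_regs and write the ack bits back for every source
 * that has fired so it can trigger again.
 */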
static void r600_irq_ack(struct radeon_device *rdev)
{
	u32 tmp;

	if (ASIC_IS_DCE3(rdev)) {
		rdev->irq.stat_regs.r600.disp_int = RREG32(DCE3_DISP_INTERRUPT_STATUS);
		rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE);
		rdev->irq.stat_regs.r600.disp_int_cont2 = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE2);
		if (ASIC_IS_DCE32(rdev)) {
			rdev->irq.stat_regs.r600.hdmi0_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET0);
			rdev->irq.stat_regs.r600.hdmi1_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET1);
		} else {
			rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
			rdev->irq.stat_regs.r600.hdmi1_status = RREG32(DCE3_HDMI1_STATUS);
		}
	} else {
		rdev->irq.stat_regs.r600.disp_int = RREG32(DISP_INTERRUPT_STATUS);
		rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
		rdev->irq.stat_regs.r600.disp_int_cont2 = 0;
		rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
		rdev->irq.stat_regs.r600.hdmi1_status = RREG32(HDMI1_STATUS);
	}
	rdev->irq.stat_regs.r600.d1grph_int = RREG32(D1GRPH_INTERRUPT_STATUS);
	rdev->irq.stat_regs.r600.d2grph_int = RREG32(D2GRPH_INTERRUPT_STATUS);

	if (rdev->irq.stat_regs.r600.d1grph_int & DxGRPH_PFLIP_INT_OCCURRED)
		WREG32(D1GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.r600.d2grph_int & DxGRPH_PFLIP_INT_OCCURRED)
		WREG32(D2GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(D1MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(D1MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT)
		WREG32(D2MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT)
		WREG32(D2MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD1_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD1_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD2_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD2_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD3_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD3_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (ASIC_IS_DCE32(rdev)) {
		if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) {
			tmp = RREG32(DC_HPD5_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD5_INT_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) {
			tmp = RREG32(DC_HPD6_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD6_INT_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi0_status & AFMT_AZ_FORMAT_WTRIG) {
			tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0);
			tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi1_status & AFMT_AZ_FORMAT_WTRIG) {
			tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1);
			tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, tmp);
		}
	} else {
		if (rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG) {
			tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL);
			tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
			WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG) {
			if (ASIC_IS_DCE3(rdev)) {
				tmp = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL);
				tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
				WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, tmp);
			} else {
				tmp = RREG32(HDMI1_AUDIO_PACKET_CONTROL);
				tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
				WREG32(HDMI1_AUDIO_PACKET_CONTROL, tmp);
			}
		}
	}
}

void r600_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
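	/* Wait and acknowledge irq */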
	mdelay(1);
	r600_irq_ack(rdev);
	r600_disable_interrupt_state(rdev);
}

static u32 r600_get_ih_wptr(struct radeon_device *rdev)
{
	u32 wptr, tmp;

	if (rdev->wb.enabled)
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
	else
		wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
		wptr &= ~RB_OVERFLOW;
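		/* When a ring buffer overflow happens, start parsing from the
		 * last not-overwritten vector (wptr + 16). Hopefully this
		 * should allow us to catch up.
		 */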
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
	}
	return (wptr & rdev->ih.ptr_mask);
}
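
/*        r600 IH ring
 *
 * Interrupts use a ring buffer on r6xx/r7xx hardware. It works much
 * like the CP ring: the IH block writes interrupt vectors into the
 * ring and the host reads them back out to dispatch them.
 * Each vector is 128 bits (four dwords); the handler below decodes
 * the following source ids:
 *
 * src_id  src_data  description
 *      1       0/1  D1 vblank / D1 vline
 *      5       0/1  D2 vblank / D2 vline
 *      9         -  D1 page flip
 *     11         -  D2 page flip
 *     19  0,1,4,5,10,12  HPD1-HPD6 hotplug
 *     21       4/5  HDMI0 / HDMI1 audio format change
 *    124         -  UVD
 *    176-178     -  CP
 *    181         -  CP EOP
 *    224         -  DMA trap
 *    230/231     -  thermal low-to-high / high-to-low
 *    233         -  GUI idle
 */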
int r600_irq_process(struct radeon_device *rdev)
{
	u32 wptr;
	u32 rptr;
	u32 src_id, src_data;
	u32 ring_index;
	bool queue_hotplug = false;
	bool queue_hdmi = false;
	bool queue_thermal = false;

	if (!rdev->ih.enabled || rdev->shutdown)
		return IRQ_NONE;
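
	/* No MSIs, need a dummy read to flush PCI DMAs */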
	if (!rdev->msi_enabled)
		RREG32(IH_RB_WPTR);

	wptr = r600_get_ih_wptr(rdev);

restart_ih:
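	/* is somebody else already processing irqs? */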
	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;

	rptr = rdev->ih.rptr;
	DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
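
	/* Order reading of wptr vs. reading of IH ring data */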
	rmb();
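
	/* display interrupts */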
	r600_irq_ack(rdev);

	while (rptr != wptr) {
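		/* wptr/rptr are in bytes! */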
		ring_index = rptr / 4;
		src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;

		switch (src_id) {
		case 1: /* D1 vblank/vline */
			switch (src_data) {
			case 0: /* D1 vblank */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[0]) {
					drm_handle_vblank(rdev->ddev, 0);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[0]))
					radeon_crtc_handle_vblank(rdev, 0);
				rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D1 vblank\n");
				break;
			case 1: /* D1 vline */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D1 vline\n");
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 5: /* D2 vblank/vline */
			switch (src_data) {
			case 0: /* D2 vblank */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT))
					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");

				if (rdev->irq.crtc_vblank_int[1]) {
					drm_handle_vblank(rdev->ddev, 1);
					rdev->pm.vblank_sync = true;
					wake_up(&rdev->irq.vblank_queue);
				}
				if (atomic_read(&rdev->irq.pflip[1]))
					radeon_crtc_handle_vblank(rdev, 1);
				rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VBLANK_INTERRUPT;
				DRM_DEBUG("IH: D2 vblank\n");
				break;
			case 1: /* D2 vline */
				if (!(rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT))
					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VLINE_INTERRUPT;
				DRM_DEBUG("IH: D2 vline\n");
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 9: /* D1 pflip */
			DRM_DEBUG("IH: D1 flip\n");
			if (radeon_use_pflipirq > 0)
				radeon_crtc_handle_flip(rdev, 0);
			break;
		case 11: /* D2 pflip */
			DRM_DEBUG("IH: D2 flip\n");
			if (radeon_use_pflipirq > 0)
				radeon_crtc_handle_flip(rdev, 1);
			break;
		case 19: /* HPD/DAC hotplug */
			switch (src_data) {
			case 0:
				if (!(rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT))
					DRM_DEBUG("IH: HPD1 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD1_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD1\n");
				break;
			case 1:
				if (!(rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT))
					DRM_DEBUG("IH: HPD2 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD2_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD2\n");
				break;
			case 4:
				if (!(rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT))
					DRM_DEBUG("IH: HPD3 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD3_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD3\n");
				break;
			case 5:
				if (!(rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT))
					DRM_DEBUG("IH: HPD4 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD4_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD4\n");
				break;
			case 10:
				if (!(rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT))
					DRM_DEBUG("IH: HPD5 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD5_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD5\n");
				break;
			case 12:
				if (!(rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT))
					DRM_DEBUG("IH: HPD6 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD6_INTERRUPT;
				queue_hotplug = true;
				DRM_DEBUG("IH: HPD6\n");
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 21: /* hdmi */
			switch (src_data) {
			case 4:
				if (!(rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: HDMI0 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.hdmi0_status &= ~HDMI0_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI0\n");
				break;
			case 5:
				if (!(rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG))
					DRM_DEBUG("IH: HDMI1 - IH event w/o asserted irq bit?\n");

				rdev->irq.stat_regs.r600.hdmi1_status &= ~HDMI0_AZ_FORMAT_WTRIG;
				queue_hdmi = true;
				DRM_DEBUG("IH: HDMI1\n");
				break;
			default:
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 124: /* UVD */
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
			break;
		case 176: /* CP_INT in ring buffer */
		case 177: /* CP_INT in IB1 */
		case 178: /* CP_INT in IB2 */
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 181: /* CP EOP event */
			DRM_DEBUG("IH: CP EOP\n");
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 224: /* DMA trap event */
			DRM_DEBUG("IH: DMA trap\n");
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
			break;
		case 230: /* thermal low to high */
			DRM_DEBUG("IH: thermal low to high\n");
			rdev->pm.dpm.thermal.high_to_low = false;
			queue_thermal = true;
			break;
		case 231: /* thermal high to low */
			DRM_DEBUG("IH: thermal high to low\n");
			rdev->pm.dpm.thermal.high_to_low = true;
			queue_thermal = true;
			break;
		case 233: /* GUI idle */
			DRM_DEBUG("IH: GUI idle\n");
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}
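
		/* wptr/rptr are in bytes! */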
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
		WREG32(IH_RB_RPTR, rptr);
	}
	if (queue_hotplug)
		schedule_delayed_work(&rdev->hotplug_work, 0);
	if (queue_hdmi)
		schedule_work(&rdev->audio_work);
	if (queue_thermal && rdev->pm.dpm_enabled)
		schedule_work(&rdev->pm.dpm.thermal.work);
	rdev->ih.rptr = rptr;
	atomic_set(&rdev->ih.lock, 0);
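
	/* make sure wptr hasn't changed while processing */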
	wptr = r600_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}
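
/*
 * Debugfs info
 */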
#if defined(CONFIG_DEBUG_FS)

static int r600_debugfs_mc_info(struct seq_file *m, void *data)
{
	struct drm_info_node *node = (struct drm_info_node *) m->private;
	struct drm_device *dev = node->minor->dev;
	struct radeon_device *rdev = dev->dev_private;

	DREG32_SYS(m, rdev, R_000E50_SRBM_STATUS);
	DREG32_SYS(m, rdev, VM_L2_STATUS);
	return 0;
}

static struct drm_info_list r600_mc_info_list[] = {
	{"r600_mc_info", r600_debugfs_mc_info, 0, NULL},
};
#endif

int r600_debugfs_mc_info_init(struct radeon_device *rdev)
{
#if defined(CONFIG_DEBUG_FS)
	return radeon_debugfs_add_files(rdev, r600_mc_info_list, ARRAY_SIZE(r600_mc_info_list));
#else
	return 0;
#endif
}
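
/**
 * r600_mmio_hdp_flush - flush the Host Data Path cache via MMIO
 *
 * @rdev: radeon_device pointer
 *
 * Flushes the HDP cache so that CPU writes to the framebuffer become
 * visible to the GPU (r6xx/r7xx).
 */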
void r600_mmio_hdp_flush(struct radeon_device *rdev)
{
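	/* r7xx hw bug.  Write to HDP_DEBUG1 followed by an fb read
	 * rather than a write to HDP_REG_COHERENCY_FLUSH_CNTL.
	 * This seems to cause problems on some AGP cards. Just use the old
	 * method for them.
	 */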
	if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
	    rdev->vram_scratch.ptr && !(rdev->flags & RADEON_IS_AGP)) {
		void __iomem *ptr = (void __iomem *)rdev->vram_scratch.ptr;

		WREG32(HDP_DEBUG1, 0);
		readl(ptr);
	} else
		WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
}

void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes)
{
	u32 link_width_cntl, mask;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;
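
	/* x2 cards have a special sequence */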
	if (ASIC_IS_X2(rdev))
		return;

	radeon_gui_idle(rdev);

	switch (lanes) {
	case 0:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X0;
		break;
	case 1:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X1;
		break;
	case 2:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X2;
		break;
	case 4:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X4;
		break;
	case 8:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X8;
		break;
	case 12:
		/* not actually supported */
		mask = RADEON_PCIE_LC_LINK_WIDTH_X12;
		break;
	case 16:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X16;
		break;
	default:
		DRM_ERROR("invalid pcie lane request: %d\n", lanes);
		return;
	}

	link_width_cntl = RREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL);
	link_width_cntl &= ~RADEON_PCIE_LC_LINK_WIDTH_MASK;
	link_width_cntl |= mask << RADEON_PCIE_LC_LINK_WIDTH_SHIFT;
	link_width_cntl |= (RADEON_PCIE_LC_RECONFIG_NOW |
			    R600_PCIE_LC_RECONFIG_ARC_MISSING_ESCAPE);

	WREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
}

int r600_get_pcie_lanes(struct radeon_device *rdev)
{
	u32 link_width_cntl;

	if (rdev->flags & RADEON_IS_IGP)
		return 0;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return 0;
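
	/* x2 cards have a special sequence */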
	if (ASIC_IS_X2(rdev))
		return 0;

	radeon_gui_idle(rdev);

	link_width_cntl = RREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL);

	switch ((link_width_cntl & RADEON_PCIE_LC_LINK_WIDTH_RD_MASK) >> RADEON_PCIE_LC_LINK_WIDTH_RD_SHIFT) {
	case RADEON_PCIE_LC_LINK_WIDTH_X1:
		return 1;
	case RADEON_PCIE_LC_LINK_WIDTH_X2:
		return 2;
	case RADEON_PCIE_LC_LINK_WIDTH_X4:
		return 4;
	case RADEON_PCIE_LC_LINK_WIDTH_X8:
		return 8;
	case RADEON_PCIE_LC_LINK_WIDTH_X12:
		/* not actually supported */
		return 12;
	case RADEON_PCIE_LC_LINK_WIDTH_X0:
	case RADEON_PCIE_LC_LINK_WIDTH_X16:
	default:
		return 16;
	}
}

static void r600_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, lanes, speed_cntl, training_cntl, tmp;
	u16 link_cntl2;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;
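
	/* x2 cards have a special sequence */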
	if (ASIC_IS_X2(rdev))
		return;
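
	/* only RV6xx+ chips are supported */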
	if (rdev->family <= CHIP_R600)
		return;

	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
	    (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
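
	/* 55 nm r6xx asics */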
	if ((rdev->family == CHIP_RV670) ||
	    (rdev->family == CHIP_RV620) ||
	    (rdev->family == CHIP_RV635)) {
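		/* advertise upconfig capability */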
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		if (link_width_cntl & LC_RENEGOTIATION_SUPPORT) {
			lanes = (link_width_cntl & LC_LINK_WIDTH_RD_MASK) >> LC_LINK_WIDTH_RD_SHIFT;
			link_width_cntl &= ~(LC_LINK_WIDTH_MASK |
					     LC_RECONFIG_ARC_MISSING_ESCAPE);
			link_width_cntl |= lanes | LC_RECONFIG_NOW | LC_RENEGOTIATE_EN;
			WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		} else {
			link_width_cntl |= LC_UPCONFIGURE_DIS;
			WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		}
	}

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
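
		/* 55 nm r6xx asics */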
		if ((rdev->family == CHIP_RV670) ||
		    (rdev->family == CHIP_RV620) ||
		    (rdev->family == CHIP_RV635)) {
			WREG32(MM_CFGREGS_CNTL, 0x8);
			link_cntl2 = RREG32(0x4088);
			WREG32(MM_CFGREGS_CNTL, 0);
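			/* not supported yet */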
			if (link_cntl2 & SELECTABLE_DEEMPHASIS)
				return;
		}

		speed_cntl &= ~LC_SPEED_CHANGE_ATTEMPTS_ALLOWED_MASK;
		speed_cntl |= (0x3 << LC_SPEED_CHANGE_ATTEMPTS_ALLOWED_SHIFT);
		speed_cntl &= ~LC_VOLTAGE_TIMER_SEL_MASK;
		speed_cntl &= ~LC_FORCE_DIS_HW_SPEED_CHANGE;
		speed_cntl |= LC_FORCE_EN_HW_SPEED_CHANGE;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		tmp = RREG32(0x541c);
		WREG32(0x541c, tmp | 0x8);
		WREG32(MM_CFGREGS_CNTL, MM_WR_TO_CFG_EN);
		link_cntl2 = RREG16(0x4088);
		link_cntl2 &= ~TARGET_LINK_SPEED_MASK;
		link_cntl2 |= 0x2;
		WREG16(0x4088, link_cntl2);
		WREG32(MM_CFGREGS_CNTL, 0);

		if ((rdev->family == CHIP_RV670) ||
		    (rdev->family == CHIP_RV620) ||
		    (rdev->family == CHIP_RV635)) {
			training_cntl = RREG32_PCIE_PORT(PCIE_LC_TRAINING_CNTL);
			training_cntl &= ~LC_POINT_7_PLUS_EN;
			WREG32_PCIE_PORT(PCIE_LC_TRAINING_CNTL, training_cntl);
		} else {
			speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
			speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
		}

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
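		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */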
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
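
/**
 * r600_get_gpu_clock_counter - return GPU clock counter snapshot
 *
 * @rdev: radeon_device pointer
 *
 * Fetches a GPU clock counter snapshot.
 * Returns the 64 bit clock counter snapshot.
 */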
uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev)
{
	uint64_t clock;

	mutex_lock(&rdev->gpu_clock_mutex);
	WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
	clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
		((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
	mutex_unlock(&rdev->gpu_clock_mutex);
	return clock;
}