/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/firmware.h>
#include <linux/module.h>
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_mode.h"
#include "r600d.h"
#include "atom.h"
#include "avivod.h"
#include "radeon_ucode.h"

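/* Firmware Names */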
MODULE_FIRMWARE("radeon/R600_pfp.bin");
MODULE_FIRMWARE("radeon/R600_me.bin");
MODULE_FIRMWARE("radeon/RV610_pfp.bin");
MODULE_FIRMWARE("radeon/RV610_me.bin");
MODULE_FIRMWARE("radeon/RV630_pfp.bin");
MODULE_FIRMWARE("radeon/RV630_me.bin");
MODULE_FIRMWARE("radeon/RV620_pfp.bin");
MODULE_FIRMWARE("radeon/RV620_me.bin");
MODULE_FIRMWARE("radeon/RV635_pfp.bin");
MODULE_FIRMWARE("radeon/RV635_me.bin");
MODULE_FIRMWARE("radeon/RV670_pfp.bin");
MODULE_FIRMWARE("radeon/RV670_me.bin");
MODULE_FIRMWARE("radeon/RS780_pfp.bin");
MODULE_FIRMWARE("radeon/RS780_me.bin");
MODULE_FIRMWARE("radeon/RV770_pfp.bin");
MODULE_FIRMWARE("radeon/RV770_me.bin");
MODULE_FIRMWARE("radeon/RV770_smc.bin");
MODULE_FIRMWARE("radeon/RV730_pfp.bin");
MODULE_FIRMWARE("radeon/RV730_me.bin");
MODULE_FIRMWARE("radeon/RV730_smc.bin");
MODULE_FIRMWARE("radeon/RV740_smc.bin");
MODULE_FIRMWARE("radeon/RV710_pfp.bin");
MODULE_FIRMWARE("radeon/RV710_me.bin");
MODULE_FIRMWARE("radeon/RV710_smc.bin");
MODULE_FIRMWARE("radeon/R600_rlc.bin");
MODULE_FIRMWARE("radeon/R700_rlc.bin");
MODULE_FIRMWARE("radeon/CEDAR_pfp.bin");
MODULE_FIRMWARE("radeon/CEDAR_me.bin");
MODULE_FIRMWARE("radeon/CEDAR_rlc.bin");
MODULE_FIRMWARE("radeon/CEDAR_smc.bin");
MODULE_FIRMWARE("radeon/REDWOOD_pfp.bin");
MODULE_FIRMWARE("radeon/REDWOOD_me.bin");
MODULE_FIRMWARE("radeon/REDWOOD_rlc.bin");
MODULE_FIRMWARE("radeon/REDWOOD_smc.bin");
MODULE_FIRMWARE("radeon/JUNIPER_pfp.bin");
MODULE_FIRMWARE("radeon/JUNIPER_me.bin");
MODULE_FIRMWARE("radeon/JUNIPER_rlc.bin");
MODULE_FIRMWARE("radeon/JUNIPER_smc.bin");
MODULE_FIRMWARE("radeon/CYPRESS_pfp.bin");
MODULE_FIRMWARE("radeon/CYPRESS_me.bin");
MODULE_FIRMWARE("radeon/CYPRESS_rlc.bin");
MODULE_FIRMWARE("radeon/CYPRESS_smc.bin");
MODULE_FIRMWARE("radeon/PALM_pfp.bin");
MODULE_FIRMWARE("radeon/PALM_me.bin");
MODULE_FIRMWARE("radeon/SUMO_rlc.bin");
MODULE_FIRMWARE("radeon/SUMO_pfp.bin");
MODULE_FIRMWARE("radeon/SUMO_me.bin");
MODULE_FIRMWARE("radeon/SUMO2_pfp.bin");
MODULE_FIRMWARE("radeon/SUMO2_me.bin");

static const u32 crtc_offsets[2] =
{
	0,
	AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL
};

int r600_debugfs_mc_info_init(struct radeon_device *rdev);

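/* r600,rv610,rv630,rv620,rv635,rv670 */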
int r600_mc_wait_for_idle(struct radeon_device *rdev);
static void r600_gpu_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
void r600_irq_disable(struct radeon_device *rdev);
static void r600_pcie_gen2_enable(struct radeon_device *rdev);
extern int evergreen_rlc_resume(struct radeon_device *rdev);

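/**
 * r600_get_xclk - get the xclk
 *
 * @rdev: radeon_device pointer
 *
 * Returns the reference clock used by the gfx engine
 * (r6xx, IGPs, APUs).
 */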
u32 r600_get_xclk(struct radeon_device *rdev)
{
	return rdev->clock.spll.reference_freq;
}

int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	return 0;
}

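/* get temperature in millidegrees */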
int rv6xx_get_temp(struct radeon_device *rdev)
{
	u32 temp = (RREG32(CG_THERMAL_STATUS) & ASIC_T_MASK) >>
		ASIC_T_SHIFT;
	int actual_temp = temp & 0xff;

	if (temp & 0x100)
		actual_temp -= 256;

	return actual_temp * 1000;
}

void r600_pm_get_dynpm_state(struct radeon_device *rdev)
{
	int i;

	rdev->pm.dynpm_can_upclock = true;
	rdev->pm.dynpm_can_downclock = true;

	/* power state array is low to high, default is first (0) */
	if ((rdev->flags & RADEON_IS_IGP) || (rdev->family == CHIP_R600)) {
		int min_power_state_index = 0;

		if (rdev->pm.num_power_states > 2)
			min_power_state_index = 1;

		switch (rdev->pm.dynpm_planned_action) {
		case DYNPM_ACTION_MINIMUM:
			rdev->pm.requested_power_state_index = min_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_downclock = false;
			break;
		case DYNPM_ACTION_DOWNCLOCK:
			if (rdev->pm.current_power_state_index == min_power_state_index) {
				rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
				rdev->pm.dynpm_can_downclock = false;
			} else {
				if (rdev->pm.active_crtc_count > 1) {
					for (i = 0; i < rdev->pm.num_power_states; i++) {
						if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
							continue;
						else if (i >= rdev->pm.current_power_state_index) {
							rdev->pm.requested_power_state_index =
								rdev->pm.current_power_state_index;
							break;
						} else {
							rdev->pm.requested_power_state_index = i;
							break;
						}
					}
				} else {
					if (rdev->pm.current_power_state_index == 0)
						rdev->pm.requested_power_state_index =
							rdev->pm.num_power_states - 1;
					else
						rdev->pm.requested_power_state_index =
							rdev->pm.current_power_state_index - 1;
				}
			}
			rdev->pm.requested_clock_mode_index = 0;
			/* don't use the power state if crtcs are active and no display flag is set */
			if ((rdev->pm.active_crtc_count > 0) &&
			    (rdev->pm.power_state[rdev->pm.requested_power_state_index].
			     clock_info[rdev->pm.requested_clock_mode_index].flags &
			     RADEON_PM_MODE_NO_DISPLAY)) {
				rdev->pm.requested_power_state_index++;
			}
			break;
		case DYNPM_ACTION_UPCLOCK:
			if (rdev->pm.current_power_state_index == (rdev->pm.num_power_states - 1)) {
				rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
				rdev->pm.dynpm_can_upclock = false;
			} else {
				if (rdev->pm.active_crtc_count > 1) {
					for (i = (rdev->pm.num_power_states - 1); i >= 0; i--) {
						if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
							continue;
						else if (i <= rdev->pm.current_power_state_index) {
							rdev->pm.requested_power_state_index =
								rdev->pm.current_power_state_index;
							break;
						} else {
							rdev->pm.requested_power_state_index = i;
							break;
						}
					}
				} else
					rdev->pm.requested_power_state_index =
						rdev->pm.current_power_state_index + 1;
			}
			rdev->pm.requested_clock_mode_index = 0;
			break;
		case DYNPM_ACTION_DEFAULT:
			rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_upclock = false;
			break;
		case DYNPM_ACTION_NONE:
		default:
			DRM_ERROR("Requested mode for undefined dynpm action\n");
			return;
		}
	} else {
		/* for now just select the first usable power state and
		 * switch between clock modes.
		 * power state array is low to high, default is first (0)
		 */
		if (rdev->pm.active_crtc_count > 1) {
			rdev->pm.requested_power_state_index = -1;
			/* start at 1 to skip the default state */
			for (i = 1; i < rdev->pm.num_power_states; i++) {
				if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
					continue;
				else if ((rdev->pm.power_state[i].type == POWER_STATE_TYPE_PERFORMANCE) ||
					 (rdev->pm.power_state[i].type == POWER_STATE_TYPE_BATTERY)) {
					rdev->pm.requested_power_state_index = i;
					break;
				}
			}
			/* if nothing was selected, fall back to the first state */
			if (rdev->pm.requested_power_state_index == -1)
				rdev->pm.requested_power_state_index = 0;
		} else
			rdev->pm.requested_power_state_index = 1;

		switch (rdev->pm.dynpm_planned_action) {
		case DYNPM_ACTION_MINIMUM:
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_downclock = false;
			break;
		case DYNPM_ACTION_DOWNCLOCK:
			if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
				if (rdev->pm.current_clock_mode_index == 0) {
					rdev->pm.requested_clock_mode_index = 0;
					rdev->pm.dynpm_can_downclock = false;
				} else
					rdev->pm.requested_clock_mode_index =
						rdev->pm.current_clock_mode_index - 1;
			} else {
				rdev->pm.requested_clock_mode_index = 0;
				rdev->pm.dynpm_can_downclock = false;
			}
			/* don't use the power state if crtcs are active and no display flag is set */
			if ((rdev->pm.active_crtc_count > 0) &&
			    (rdev->pm.power_state[rdev->pm.requested_power_state_index].
			     clock_info[rdev->pm.requested_clock_mode_index].flags &
			     RADEON_PM_MODE_NO_DISPLAY)) {
				rdev->pm.requested_clock_mode_index++;
			}
			break;
		case DYNPM_ACTION_UPCLOCK:
			if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
				if (rdev->pm.current_clock_mode_index ==
				    (rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1)) {
					rdev->pm.requested_clock_mode_index = rdev->pm.current_clock_mode_index;
					rdev->pm.dynpm_can_upclock = false;
				} else
					rdev->pm.requested_clock_mode_index =
						rdev->pm.current_clock_mode_index + 1;
			} else {
				rdev->pm.requested_clock_mode_index =
					rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1;
				rdev->pm.dynpm_can_upclock = false;
			}
			break;
		case DYNPM_ACTION_DEFAULT:
			rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
			rdev->pm.requested_clock_mode_index = 0;
			rdev->pm.dynpm_can_upclock = false;
			break;
		case DYNPM_ACTION_NONE:
		default:
			DRM_ERROR("Requested mode for undefined dynpm action\n");
			return;
		}
	}

	DRM_DEBUG_DRIVER("Requested: e: %d m: %d p: %d\n",
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 clock_info[rdev->pm.requested_clock_mode_index].sclk,
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 clock_info[rdev->pm.requested_clock_mode_index].mclk,
			 rdev->pm.power_state[rdev->pm.requested_power_state_index].
			 pcie_lanes);
}

void rs780_pm_init_profile(struct radeon_device *rdev)
{
	if (rdev->pm.num_power_states == 2) {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else if (rdev->pm.num_power_states == 3) {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 1;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 3;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 3;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	}
}

void r600_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	if (rdev->family == CHIP_R600) {
		/* default */
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
		/* low sh */
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
		/* mid sh */
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
		/* high sh */
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
		/* low mh */
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
		/* mid mh */
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
		/* high mh */
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
		rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
	} else {
		if (rdev->pm.num_power_states < 4) {
			/* default */
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
			/* low sh */
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
			/* mid sh */
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
			/* high sh */
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
			/* low mh */
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
			/* mid mh */
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
			/* high mh */
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
		} else {
			/* default */
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
			/* low sh */
			if (rdev->flags & RADEON_IS_MOBILITY)
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
			else
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
			/* mid sh */
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
			/* high sh */
			idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
			/* low mh */
			if (rdev->flags & RADEON_IS_MOBILITY)
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1);
			else
				idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
			/* mid mh */
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
			/* high mh */
			idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
			rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
		}
	}
}

void r600_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
		/* 0xff01 is a flag rather than an actual voltage */
		if (voltage->voltage == 0xff01)
			return;
		if (voltage->voltage != rdev->pm.current_vddc) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG_DRIVER("Setting: v: %d\n", voltage->voltage);
		}
	}
}

bool r600_gui_idle(struct radeon_device *rdev)
{
	if (RREG32(GRBM_STATUS) & GUI_ACTIVE)
		return false;
	else
		return true;
}

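/* hpd for digital panel detect/disconnect */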
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	if (ASIC_IS_DCE3(rdev)) {
		switch (hpd) {
		case RADEON_HPD_1:
			if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_2:
			if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_3:
			if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_4:
			if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
			/* DCE 3.2 */
		case RADEON_HPD_5:
			if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_6:
			if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
				connected = true;
			break;
		default:
			break;
		}
	} else {
		switch (hpd) {
		case RADEON_HPD_1:
			if (RREG32(DC_HOT_PLUG_DETECT1_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_2:
			if (RREG32(DC_HOT_PLUG_DETECT2_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		case RADEON_HPD_3:
			if (RREG32(DC_HOT_PLUG_DETECT3_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
				connected = true;
			break;
		default:
			break;
		}
	}
	return connected;
}

void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = r600_hpd_sense(rdev, hpd);

	if (ASIC_IS_DCE3(rdev)) {
		switch (hpd) {
		case RADEON_HPD_1:
			tmp = RREG32(DC_HPD1_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD1_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			tmp = RREG32(DC_HPD2_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD2_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			tmp = RREG32(DC_HPD3_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD3_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_4:
			tmp = RREG32(DC_HPD4_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD4_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_5:
			tmp = RREG32(DC_HPD5_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD5_INT_CONTROL, tmp);
			break;

		case RADEON_HPD_6:
			tmp = RREG32(DC_HPD6_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HPDx_INT_POLARITY;
			else
				tmp |= DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD6_INT_CONTROL, tmp);
			break;
		default:
			break;
		}
	} else {
		switch (hpd) {
		case RADEON_HPD_1:
			tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
			if (connected)
				tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
			else
				tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
			WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
			break;
		default:
			break;
		}
	}
}

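/**
 * r600_hpd_init - hpd setup callback.
 *
 * @rdev: radeon_device pointer
 *
 * Setup the hpd pins used by the card (r6xx-r7xx).
 * Enable the pin, set the polarity, and enable the hpd interrupts.
 */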
void r600_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned enable = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);

		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
			/* don't try to enable hpd on eDP or LVDS; it can
			 * break the DP aux channel on some panels and can
			 * also cause interrupt storms during dpms.
			 */
			continue;
		}
		if (ASIC_IS_DCE3(rdev)) {
			u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) | DC_HPDx_RX_INT_TIMER(0xfa);
			if (ASIC_IS_DCE32(rdev))
				tmp |= DC_HPDx_EN;

			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HPD1_CONTROL, tmp);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HPD2_CONTROL, tmp);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HPD3_CONTROL, tmp);
				break;
			case RADEON_HPD_4:
				WREG32(DC_HPD4_CONTROL, tmp);
				break;
				/* DCE 3.2 */
			case RADEON_HPD_5:
				WREG32(DC_HPD5_CONTROL, tmp);
				break;
			case RADEON_HPD_6:
				WREG32(DC_HPD6_CONTROL, tmp);
				break;
			default:
				break;
			}
		} else {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HOT_PLUG_DETECT1_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HOT_PLUG_DETECT2_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HOT_PLUG_DETECT3_CONTROL, DC_HOT_PLUG_DETECTx_EN);
				break;
			default:
				break;
			}
		}
		enable |= 1 << radeon_connector->hpd.hpd;
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
	}
	radeon_irq_kms_enable_hpd(rdev, enable);
}

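/**
 * r600_hpd_fini - hpd tear down callback.
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the hpd pins used by the card (r6xx-r7xx).
 * Disable the hpd interrupts.
 */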
void r600_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned disable = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		if (ASIC_IS_DCE3(rdev)) {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HPD1_CONTROL, 0);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HPD2_CONTROL, 0);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HPD3_CONTROL, 0);
				break;
			case RADEON_HPD_4:
				WREG32(DC_HPD4_CONTROL, 0);
				break;
				/* DCE 3.2 */
			case RADEON_HPD_5:
				WREG32(DC_HPD5_CONTROL, 0);
				break;
			case RADEON_HPD_6:
				WREG32(DC_HPD6_CONTROL, 0);
				break;
			default:
				break;
			}
		} else {
			switch (radeon_connector->hpd.hpd) {
			case RADEON_HPD_1:
				WREG32(DC_HOT_PLUG_DETECT1_CONTROL, 0);
				break;
			case RADEON_HPD_2:
				WREG32(DC_HOT_PLUG_DETECT2_CONTROL, 0);
				break;
			case RADEON_HPD_3:
				WREG32(DC_HOT_PLUG_DETECT3_CONTROL, 0);
				break;
			default:
				break;
			}
		}
		disable |= 1 << radeon_connector->hpd.hpd;
	}
	radeon_irq_kms_disable_hpd(rdev, disable);
}

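/*
 * R600 PCIE GART
 */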
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush hdp cache so updates hit vram */
	if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
	    !(rdev->flags & RADEON_IS_AGP)) {
		void __iomem *ptr = (void *)rdev->gart.ptr;
		u32 tmp;

		/* r7xx hw bug.  Write to HDP_DEBUG1 followed by an fb read
		 * rather than a write to HDP_MEM_COHERENCY_FLUSH_CNTL.
		 */
		WREG32(HDP_DEBUG1, 0);
		tmp = readl((void __iomem *)ptr);
	} else
		WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_INVALIDATION_LOW_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_INVALIDATION_HIGH_ADDR, (rdev->mc.gtt_end - 1) >> 12);
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* poll for the invalidation response */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}

int r600_pcie_gart_init(struct radeon_device *rdev)
{
	int r;

	if (rdev->gart.robj) {
		WARN(1, "R600 PCIE GART already initialized\n");
		return 0;
	}
	/* Initialize common gart structure */
	r = radeon_gart_init(rdev);
	if (r)
		return r;
	rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;
	return radeon_gart_table_vram_alloc(rdev);
}

static int r600_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r, i;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
	      SYSTEM_ACCESS_MODE_NOT_IN_SYS |
	      EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
	      ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp | ENABLE_L1_STRICT_ORDERING);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
	       RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));
	for (i = 1; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	r600_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}

static void r600_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Disable all tables */
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	/* Disable L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup L1 TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
	      ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}

static void r600_pcie_gart_fini(struct radeon_device *rdev)
{
	radeon_gart_fini(rdev);
	r600_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
}

static void r600_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
	      SYSTEM_ACCESS_MODE_NOT_IN_SYS |
	      EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
	      ENABLE_WAIT_L2_QUERY;
	WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp | ENABLE_L1_STRICT_ORDERING);
	WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
	WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

int r600_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the MC busy bits of SRBM_STATUS */
		tmp = RREG32(R_000E50_SRBM_STATUS) & 0x3F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

uint32_t rs780_mc_rreg(struct radeon_device *rdev, uint32_t reg)
{
	unsigned long flags;
	uint32_t r;

	spin_lock_irqsave(&rdev->mc_idx_lock, flags);
	WREG32(R_0028F8_MC_INDEX, S_0028F8_MC_IND_ADDR(reg));
	r = RREG32(R_0028FC_MC_DATA);
	WREG32(R_0028F8_MC_INDEX, ~C_0028F8_MC_IND_ADDR);
	spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
	return r;
}

void rs780_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->mc_idx_lock, flags);
	WREG32(R_0028F8_MC_INDEX, S_0028F8_MC_IND_ADDR(reg) |
	       S_0028F8_MC_IND_WR_EN(1));
	WREG32(R_0028FC_MC_DATA, v);
	WREG32(R_0028F8_MC_INDEX, 0x7F);
	spin_unlock_irqrestore(&rdev->mc_idx_lock, flags);
}

static void r600_mc_program(struct radeon_device *rdev)
{
	struct rv515_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	rv515_mc_stop(rdev, &save);
	if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture (doesn't exist before R600) */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			       rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			       rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			       rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			       rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 22);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 22);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	rv515_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}

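/**
 * r600_vram_gtt_location - try to find VRAM & GTT location
 *
 * @rdev: radeon_device pointer
 * @mc: memory controller structure holding memory information
 *
 * Place the VRAM and GTT apertures in the MC address space.  On AGP
 * boards the VRAM is placed directly before or after the AGP aperture,
 * whichever gap is larger, and VRAM is limited to that gap if it does
 * not fit.  On PCIE and IGP boards the generic radeon_vram_location()
 * and radeon_gtt_location() helpers are used, with IGPs placing VRAM
 * at the base programmed into MC_VM_FB_LOCATION.
 */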
static void r600_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
{
	u64 size_bf, size_af;

	if (mc->mc_vram_size > 0xE0000000) {
		/* leave room in the MC address space for the GTT */
		dev_warn(rdev->dev, "limiting VRAM\n");
		mc->real_vram_size = 0xE0000000;
		mc->mc_vram_size = 0xE0000000;
	}
	if (rdev->flags & RADEON_IS_AGP) {
		size_bf = mc->gtt_start;
		size_af = mc->mc_mask - mc->gtt_end;
		if (size_bf > size_af) {
			if (mc->mc_vram_size > size_bf) {
				dev_warn(rdev->dev, "limiting VRAM\n");
				mc->real_vram_size = size_bf;
				mc->mc_vram_size = size_bf;
			}
			mc->vram_start = mc->gtt_start - mc->mc_vram_size;
		} else {
			if (mc->mc_vram_size > size_af) {
				dev_warn(rdev->dev, "limiting VRAM\n");
				mc->real_vram_size = size_af;
				mc->mc_vram_size = size_af;
			}
			mc->vram_start = mc->gtt_end + 1;
		}
		mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
		dev_info(rdev->dev, "VRAM: %lluM 0x%08llX - 0x%08llX (%lluM used)\n",
			 mc->mc_vram_size >> 20, mc->vram_start,
			 mc->vram_end, mc->real_vram_size >> 20);
	} else {
		u64 base = 0;
		if (rdev->flags & RADEON_IS_IGP) {
			base = RREG32(MC_VM_FB_LOCATION) & 0xFFFF;
			base <<= 24;
		}
		radeon_vram_location(rdev, &rdev->mc, base);
		rdev->mc.gtt_base_align = 0;
		radeon_gtt_location(rdev, mc);
	}
}

static int r600_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;
	uint32_t h_addr, l_addr;
	unsigned long long k8_addr;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r600_vram_gtt_location(rdev, &rdev->mc);

	if (rdev->flags & RADEON_IS_IGP) {
		rs690_pm_info(rdev);
		rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);

		if (rdev->family == CHIP_RS780 || rdev->family == CHIP_RS880) {
			/* Use K8 direct mapping for fast fb access. */
			rdev->fastfb_working = false;
			h_addr = G_000012_K8_ADDR_EXT(RREG32_MC(R_000012_MC_MISC_UMA_CNTL));
			l_addr = RREG32_MC(R_000011_K8_FB_LOCATION);
			k8_addr = ((unsigned long long)h_addr) << 32 | l_addr;
#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_PAE)
			if (k8_addr + rdev->mc.visible_vram_size < 0x100000000ULL)
#endif
			{
				/* FastFB shall be used with UMA memory.  Here it
				 * is simply disabled when sideport memory is
				 * present.
				 */
				if (rdev->mc.igp_sideport_enabled == false && radeon_fastfb == 1) {
					DRM_INFO("Direct mapping: aper base at 0x%llx, replaced by direct mapping base 0x%llx.\n",
						 (unsigned long long)rdev->mc.aper_base, k8_addr);
					rdev->mc.aper_base = (resource_size_t)k8_addr;
					rdev->fastfb_working = true;
				}
			}
		}
	}

	radeon_update_bandwidth_info(rdev);
	return 0;
}

int r600_vram_scratch_init(struct radeon_device *rdev)
{
	int r;

	if (rdev->vram_scratch.robj == NULL) {
		r = radeon_bo_create(rdev, RADEON_GPU_PAGE_SIZE,
				     PAGE_SIZE, true, RADEON_GEM_DOMAIN_VRAM,
				     NULL, &rdev->vram_scratch.robj);
		if (r) {
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->vram_scratch.robj,
			  RADEON_GEM_DOMAIN_VRAM, &rdev->vram_scratch.gpu_addr);
	if (r) {
		radeon_bo_unreserve(rdev->vram_scratch.robj);
		return r;
	}
	r = radeon_bo_kmap(rdev->vram_scratch.robj,
			   (void **)&rdev->vram_scratch.ptr);
	if (r)
		radeon_bo_unpin(rdev->vram_scratch.robj);
	radeon_bo_unreserve(rdev->vram_scratch.robj);

	return r;
}

void r600_vram_scratch_fini(struct radeon_device *rdev)
{
	int r;

	if (rdev->vram_scratch.robj == NULL) {
		return;
	}
	r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
	if (likely(r == 0)) {
		radeon_bo_kunmap(rdev->vram_scratch.robj);
		radeon_bo_unpin(rdev->vram_scratch.robj);
		radeon_bo_unreserve(rdev->vram_scratch.robj);
	}
	radeon_bo_unref(&rdev->vram_scratch.robj);
}

void r600_set_bios_scratch_engine_hung(struct radeon_device *rdev, bool hung)
{
	u32 tmp = RREG32(R600_BIOS_3_SCRATCH);

	if (hung)
		tmp |= ATOM_S3_ASIC_GUI_ENGINE_HUNG;
	else
		tmp &= ~ATOM_S3_ASIC_GUI_ENGINE_HUNG;

	WREG32(R600_BIOS_3_SCRATCH, tmp);
}

static void r600_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  R_008010_GRBM_STATUS      = 0x%08X\n",
		 RREG32(R_008010_GRBM_STATUS));
	dev_info(rdev->dev, "  R_008014_GRBM_STATUS2     = 0x%08X\n",
		 RREG32(R_008014_GRBM_STATUS2));
	dev_info(rdev->dev, "  R_000E50_SRBM_STATUS      = 0x%08X\n",
		 RREG32(R_000E50_SRBM_STATUS));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		 RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		 RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		 RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		 RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		 RREG32(DMA_STATUS_REG));
}

static bool r600_is_display_hung(struct radeon_device *rdev)
{
	u32 crtc_hung = 0;
	u32 crtc_status[2];
	u32 i, j, tmp;

	for (i = 0; i < rdev->num_crtc; i++) {
		if (RREG32(AVIVO_D1CRTC_CONTROL + crtc_offsets[i]) & AVIVO_CRTC_EN) {
			crtc_status[i] = RREG32(AVIVO_D1CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
			crtc_hung |= (1 << i);
		}
	}

	for (j = 0; j < 10; j++) {
		for (i = 0; i < rdev->num_crtc; i++) {
			if (crtc_hung & (1 << i)) {
				tmp = RREG32(AVIVO_D1CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
				if (tmp != crtc_status[i])
					crtc_hung &= ~(1 << i);
			}
		}
		if (crtc_hung == 0)
			return false;
		udelay(100);
	}

	return true;
}

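/**
 * r600_gpu_check_soft_reset - check which blocks are busy
 *
 * @rdev: radeon_device pointer
 *
 * Check which blocks are busy and return the relevant reset
 * mask to be used by r600_gpu_soft_reset().
 * Returns a mask of the blocks to be reset.
 */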
u32 r600_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(R_008010_GRBM_STATUS);
	if (rdev->family >= CHIP_RV770) {
		if (G_008010_PA_BUSY(tmp) | G_008010_SC_BUSY(tmp) |
		    G_008010_SH_BUSY(tmp) | G_008010_SX_BUSY(tmp) |
		    G_008010_TA_BUSY(tmp) | G_008010_VGT_BUSY(tmp) |
		    G_008010_DB03_BUSY(tmp) | G_008010_CB03_BUSY(tmp) |
		    G_008010_SPI03_BUSY(tmp) | G_008010_VGT_BUSY_NO_DMA(tmp))
			reset_mask |= RADEON_RESET_GFX;
	} else {
		if (G_008010_PA_BUSY(tmp) | G_008010_SC_BUSY(tmp) |
		    G_008010_SH_BUSY(tmp) | G_008010_SX_BUSY(tmp) |
		    G_008010_TA03_BUSY(tmp) | G_008010_VGT_BUSY(tmp) |
		    G_008010_DB03_BUSY(tmp) | G_008010_CB03_BUSY(tmp) |
		    G_008010_SPI03_BUSY(tmp) | G_008010_VGT_BUSY_NO_DMA(tmp))
			reset_mask |= RADEON_RESET_GFX;
	}

	if (G_008010_CF_RQ_PENDING(tmp) | G_008010_PF_RQ_PENDING(tmp) |
	    G_008010_CP_BUSY(tmp) | G_008010_CP_COHERENCY_BUSY(tmp))
		reset_mask |= RADEON_RESET_CP;

	if (G_008010_GRBM_EE_BUSY(tmp))
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(R_000E50_SRBM_STATUS);
	if (G_000E50_RLC_RQ_PENDING(tmp) | G_000E50_RLC_BUSY(tmp))
		reset_mask |= RADEON_RESET_RLC;

	if (G_000E50_IH_BUSY(tmp))
		reset_mask |= RADEON_RESET_IH;

	if (G_000E50_SEM_BUSY(tmp))
		reset_mask |= RADEON_RESET_SEM;

	if (G_000E50_GRBM_RQ_PENDING(tmp))
		reset_mask |= RADEON_RESET_GRBM;

	if (G_000E50_VMC_BUSY(tmp))
		reset_mask |= RADEON_RESET_VMC;

	if (G_000E50_MCB_BUSY(tmp) | G_000E50_MCDZ_BUSY(tmp) |
	    G_000E50_MCDY_BUSY(tmp) | G_000E50_MCDX_BUSY(tmp) |
	    G_000E50_MCDW_BUSY(tmp))
		reset_mask |= RADEON_RESET_MC;

	if (r600_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* Skip MC reset as it's most likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}

static void r600_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct rv515_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	r600_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	if (rdev->family >= CHIP_RV770)
		WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1) | S_0086D8_CP_PFP_HALT(1));
	else
		WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));

	/* disable the RLC */
	WREG32(RLC_CNTL, 0);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	mdelay(50);

	rv515_mc_stop(rdev, &save);
	if (r600_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}

	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		if (rdev->family >= CHIP_RV770)
			grbm_soft_reset |= S_008020_SOFT_RESET_DB(1) |
				S_008020_SOFT_RESET_CB(1) |
				S_008020_SOFT_RESET_PA(1) |
				S_008020_SOFT_RESET_SC(1) |
				S_008020_SOFT_RESET_SPI(1) |
				S_008020_SOFT_RESET_SX(1) |
				S_008020_SOFT_RESET_SH(1) |
				S_008020_SOFT_RESET_TC(1) |
				S_008020_SOFT_RESET_TA(1) |
				S_008020_SOFT_RESET_VC(1) |
				S_008020_SOFT_RESET_VGT(1);
		else
			grbm_soft_reset |= S_008020_SOFT_RESET_CR(1) |
				S_008020_SOFT_RESET_DB(1) |
				S_008020_SOFT_RESET_CB(1) |
				S_008020_SOFT_RESET_PA(1) |
				S_008020_SOFT_RESET_SC(1) |
				S_008020_SOFT_RESET_SMX(1) |
				S_008020_SOFT_RESET_SPI(1) |
				S_008020_SOFT_RESET_SX(1) |
				S_008020_SOFT_RESET_SH(1) |
				S_008020_SOFT_RESET_TC(1) |
				S_008020_SOFT_RESET_TA(1) |
				S_008020_SOFT_RESET_VC(1) |
				S_008020_SOFT_RESET_VGT(1);
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= S_008020_SOFT_RESET_CP(1) |
			S_008020_SOFT_RESET_VGT(1);

		srbm_soft_reset |= S_000E60_SOFT_RESET_GRBM(1);
	}

	if (reset_mask & RADEON_RESET_DMA) {
		if (rdev->family >= CHIP_RV770)
			srbm_soft_reset |= RV770_SOFT_RESET_DMA;
		else
			srbm_soft_reset |= SOFT_RESET_DMA;
	}

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= S_000E60_SOFT_RESET_RLC(1);

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= S_000E60_SOFT_RESET_SEM(1);

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= S_000E60_SOFT_RESET_IH(1);

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= S_000E60_SOFT_RESET_GRBM(1);

	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= S_000E60_SOFT_RESET_MC(1);
	}

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= S_000E60_SOFT_RESET_VMC(1);

	if (grbm_soft_reset) {
		tmp = RREG32(R_008020_GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "R_008020_GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(R_008020_GRBM_SOFT_RESET, tmp);
		tmp = RREG32(R_008020_GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(R_008020_GRBM_SOFT_RESET, tmp);
		tmp = RREG32(R_008020_GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	mdelay(1);

	rv515_mc_resume(rdev, &save);
	udelay(50);

	r600_print_gpu_status_regs(rdev);
}

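/**
 * r600_asic_reset - soft reset GPU
 *
 * @rdev: radeon_device pointer
 *
 * Look up which blocks are hung and attempt
 * to reset them.
 * Returns 0 for success.
 */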
int r600_asic_reset(struct radeon_device *rdev)
{
	u32 reset_mask;

	reset_mask = r600_gpu_check_soft_reset(rdev);

	if (reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, true);

	r600_gpu_soft_reset(rdev, reset_mask);

	reset_mask = r600_gpu_check_soft_reset(rdev);

	if (!reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, false);

	return 0;
}

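/**
 * r600_gfx_is_lockup - Check if the GFX engine is locked up
 *
 * @rdev: radeon_device pointer
 * @ring: radeon_ring structure holding ring information
 *
 * Check if the GFX engine is locked up.
 * Returns true if the engine appears to be locked up, false if not.
 */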
bool r600_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
{
	u32 reset_mask = r600_gpu_check_soft_reset(rdev);

	if (!(reset_mask & (RADEON_RESET_GFX |
			    RADEON_RESET_COMPUTE |
			    RADEON_RESET_CP))) {
		radeon_ring_lockup_update(ring);
		return false;
	}
	/* force ring activities */
	radeon_ring_force_activity(rdev, ring);
	return radeon_ring_test_lockup(rdev, ring);
}

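/* Build the backend map programmed into GB_TILING_CONFIG: distribute the
 * enabled render backends across the rendering pipes, skipping any backend
 * set in disabled_rb_mask.
 */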
u32 r6xx_remap_render_backend(struct radeon_device *rdev,
			      u32 tiling_pipe_num,
			      u32 max_rb_num,
			      u32 total_max_rb_num,
			      u32 disabled_rb_mask)
{
	u32 rendering_pipe_num, rb_num_width, req_rb_num;
	u32 pipe_rb_ratio, pipe_rb_remain, tmp;
	u32 data = 0, mask = 1 << (max_rb_num - 1);
	unsigned i, j;

	/* mask out the RBs that don't exist on that asic */
	tmp = disabled_rb_mask | ((0xff << max_rb_num) & 0xff);
	/* make sure at least one RB is available */
	if ((tmp & 0xff) != 0xff)
		disabled_rb_mask = tmp;

	rendering_pipe_num = 1 << tiling_pipe_num;
	req_rb_num = total_max_rb_num - r600_count_pipe_bits(disabled_rb_mask);
	BUG_ON(rendering_pipe_num < req_rb_num);

	pipe_rb_ratio = rendering_pipe_num / req_rb_num;
	pipe_rb_remain = rendering_pipe_num - pipe_rb_ratio * req_rb_num;

	if (rdev->family <= CHIP_RV740) {
		/* r6xx/r7xx */
		rb_num_width = 2;
	} else {
		/* eg+ */
		rb_num_width = 4;
	}

	for (i = 0; i < max_rb_num; i++) {
		if (!(mask & disabled_rb_mask)) {
			for (j = 0; j < pipe_rb_ratio; j++) {
				data <<= rb_num_width;
				data |= max_rb_num - i - 1;
			}
			if (pipe_rb_remain) {
				data <<= rb_num_width;
				data |= max_rb_num - i - 1;
				pipe_rb_remain--;
			}
		}
		mask >>= 1;
	}

	return data;
}

int r600_count_pipe_bits(uint32_t val)
{
	return hweight32(val);
}

1692static void r600_gpu_init(struct radeon_device *rdev)
1693{
1694 u32 tiling_config;
1695 u32 ramcfg;
1696 u32 cc_rb_backend_disable;
1697 u32 cc_gc_shader_pipe_config;
1698 u32 tmp;
1699 int i, j;
1700 u32 sq_config;
1701 u32 sq_gpr_resource_mgmt_1 = 0;
1702 u32 sq_gpr_resource_mgmt_2 = 0;
1703 u32 sq_thread_resource_mgmt = 0;
1704 u32 sq_stack_resource_mgmt_1 = 0;
1705 u32 sq_stack_resource_mgmt_2 = 0;
1706 u32 disabled_rb_mask;
1707
1708 rdev->config.r600.tiling_group_size = 256;
1709 switch (rdev->family) {
1710 case CHIP_R600:
1711 rdev->config.r600.max_pipes = 4;
1712 rdev->config.r600.max_tile_pipes = 8;
1713 rdev->config.r600.max_simds = 4;
1714 rdev->config.r600.max_backends = 4;
1715 rdev->config.r600.max_gprs = 256;
1716 rdev->config.r600.max_threads = 192;
1717 rdev->config.r600.max_stack_entries = 256;
1718 rdev->config.r600.max_hw_contexts = 8;
1719 rdev->config.r600.max_gs_threads = 16;
1720 rdev->config.r600.sx_max_export_size = 128;
1721 rdev->config.r600.sx_max_export_pos_size = 16;
1722 rdev->config.r600.sx_max_export_smx_size = 128;
1723 rdev->config.r600.sq_num_cf_insts = 2;
1724 break;
1725 case CHIP_RV630:
1726 case CHIP_RV635:
1727 rdev->config.r600.max_pipes = 2;
1728 rdev->config.r600.max_tile_pipes = 2;
1729 rdev->config.r600.max_simds = 3;
1730 rdev->config.r600.max_backends = 1;
1731 rdev->config.r600.max_gprs = 128;
1732 rdev->config.r600.max_threads = 192;
1733 rdev->config.r600.max_stack_entries = 128;
1734 rdev->config.r600.max_hw_contexts = 8;
1735 rdev->config.r600.max_gs_threads = 4;
1736 rdev->config.r600.sx_max_export_size = 128;
1737 rdev->config.r600.sx_max_export_pos_size = 16;
1738 rdev->config.r600.sx_max_export_smx_size = 128;
1739 rdev->config.r600.sq_num_cf_insts = 2;
1740 break;
1741 case CHIP_RV610:
1742 case CHIP_RV620:
1743 case CHIP_RS780:
1744 case CHIP_RS880:
1745 rdev->config.r600.max_pipes = 1;
1746 rdev->config.r600.max_tile_pipes = 1;
1747 rdev->config.r600.max_simds = 2;
1748 rdev->config.r600.max_backends = 1;
1749 rdev->config.r600.max_gprs = 128;
1750 rdev->config.r600.max_threads = 192;
1751 rdev->config.r600.max_stack_entries = 128;
1752 rdev->config.r600.max_hw_contexts = 4;
1753 rdev->config.r600.max_gs_threads = 4;
1754 rdev->config.r600.sx_max_export_size = 128;
1755 rdev->config.r600.sx_max_export_pos_size = 16;
1756 rdev->config.r600.sx_max_export_smx_size = 128;
1757 rdev->config.r600.sq_num_cf_insts = 1;
1758 break;
1759 case CHIP_RV670:
1760 rdev->config.r600.max_pipes = 4;
1761 rdev->config.r600.max_tile_pipes = 4;
1762 rdev->config.r600.max_simds = 4;
1763 rdev->config.r600.max_backends = 4;
1764 rdev->config.r600.max_gprs = 192;
1765 rdev->config.r600.max_threads = 192;
1766 rdev->config.r600.max_stack_entries = 256;
1767 rdev->config.r600.max_hw_contexts = 8;
1768 rdev->config.r600.max_gs_threads = 16;
1769 rdev->config.r600.sx_max_export_size = 128;
1770 rdev->config.r600.sx_max_export_pos_size = 16;
1771 rdev->config.r600.sx_max_export_smx_size = 128;
1772 rdev->config.r600.sq_num_cf_insts = 2;
1773 break;
1774 default:
1775 break;
1776 }
1777
1778
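	/* Initialize HDP */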
1779 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
1780 WREG32((0x2c14 + j), 0x00000000);
1781 WREG32((0x2c18 + j), 0x00000000);
1782 WREG32((0x2c1c + j), 0x00000000);
1783 WREG32((0x2c20 + j), 0x00000000);
1784 WREG32((0x2c24 + j), 0x00000000);
1785 }
1786
1787 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
1788
1789
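	/* Setup tiling */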
1790 tiling_config = 0;
1791 ramcfg = RREG32(RAMCFG);
1792 switch (rdev->config.r600.max_tile_pipes) {
1793 case 1:
1794 tiling_config |= PIPE_TILING(0);
1795 break;
1796 case 2:
1797 tiling_config |= PIPE_TILING(1);
1798 break;
1799 case 4:
1800 tiling_config |= PIPE_TILING(2);
1801 break;
1802 case 8:
1803 tiling_config |= PIPE_TILING(3);
1804 break;
1805 default:
1806 break;
1807 }
1808 rdev->config.r600.tiling_npipes = rdev->config.r600.max_tile_pipes;
1809 rdev->config.r600.tiling_nbanks = 4 << ((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
1810 tiling_config |= BANK_TILING((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
1811 tiling_config |= GROUP_SIZE((ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
1812
1813 tmp = (ramcfg & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
1814 if (tmp > 3) {
1815 tiling_config |= ROW_TILING(3);
1816 tiling_config |= SAMPLE_SPLIT(3);
1817 } else {
1818 tiling_config |= ROW_TILING(tmp);
1819 tiling_config |= SAMPLE_SPLIT(tmp);
1820 }
1821 tiling_config |= BANK_SWAPS(1);
1822
1823 cc_rb_backend_disable = RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000;
1824 tmp = R6XX_MAX_BACKENDS -
1825 r600_count_pipe_bits((cc_rb_backend_disable >> 16) & R6XX_MAX_BACKENDS_MASK);
1826 if (tmp < rdev->config.r600.max_backends) {
1827 rdev->config.r600.max_backends = tmp;
1828 }
1829
1830 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0x00ffff00;
1831 tmp = R6XX_MAX_PIPES -
1832 r600_count_pipe_bits((cc_gc_shader_pipe_config >> 8) & R6XX_MAX_PIPES_MASK);
1833 if (tmp < rdev->config.r600.max_pipes) {
1834 rdev->config.r600.max_pipes = tmp;
1835 }
1836 tmp = R6XX_MAX_SIMDS -
1837 r600_count_pipe_bits((cc_gc_shader_pipe_config >> 16) & R6XX_MAX_SIMDS_MASK);
1838 if (tmp < rdev->config.r600.max_simds) {
1839 rdev->config.r600.max_simds = tmp;
1840 }
1841
1842 disabled_rb_mask = (RREG32(CC_RB_BACKEND_DISABLE) >> 16) & R6XX_MAX_BACKENDS_MASK;
1843 tmp = (tiling_config & PIPE_TILING__MASK) >> PIPE_TILING__SHIFT;
1844 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.r600.max_backends,
1845 R6XX_MAX_BACKENDS, disabled_rb_mask);
1846 tiling_config |= tmp << 16;
1847 rdev->config.r600.backend_map = tmp;
1848
1849 rdev->config.r600.tile_config = tiling_config;
1850 WREG32(GB_TILING_CONFIG, tiling_config);
1851 WREG32(DCP_TILING_CONFIG, tiling_config & 0xffff);
1852 WREG32(HDP_TILING_CONFIG, tiling_config & 0xffff);
1853 WREG32(DMA_TILING_CONFIG, tiling_config & 0xffff);
1854
1855 tmp = R6XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK) >> 8);
1856 WREG32(VGT_OUT_DEALLOC_CNTL, (tmp * 4) & DEALLOC_DIST_MASK);
1857 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((tmp * 4) - 2) & VTX_REUSE_DEPTH_MASK);
1858
1859
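	/* Setup some CP states */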
1860 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) | ROQ_IB2_START(0x2b)));
1861 WREG32(CP_MEQ_THRESHOLDS, (MEQ_END(0x40) | ROQ_END(0x40)));
1862
1863 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO | SYNC_GRADIENT |
1864 SYNC_WALKER | SYNC_ALIGNER));
1865
1866 if (rdev->family == CHIP_RV670)
1867 WREG32(ARB_GDEC_RD_CNTL, 0x00000021);
1868
1869 tmp = RREG32(SX_DEBUG_1);
1870 tmp |= SMX_EVENT_RELEASE;
1871 if ((rdev->family > CHIP_R600))
1872 tmp |= ENABLE_NEW_SMX_ADDRESS;
1873 WREG32(SX_DEBUG_1, tmp);
1874
1875 if (((rdev->family) == CHIP_R600) ||
1876 ((rdev->family) == CHIP_RV630) ||
1877 ((rdev->family) == CHIP_RV610) ||
1878 ((rdev->family) == CHIP_RV620) ||
1879 ((rdev->family) == CHIP_RS780) ||
1880 ((rdev->family) == CHIP_RS880)) {
1881 WREG32(DB_DEBUG, PREZ_MUST_WAIT_FOR_POSTZ_DONE);
1882 } else {
1883 WREG32(DB_DEBUG, 0);
1884 }
1885 WREG32(DB_WATERMARKS, (DEPTH_FREE(4) | DEPTH_CACHELINE_FREE(16) |
1886 DEPTH_FLUSH(16) | DEPTH_PENDING_FREE(4)));
1887
1888 WREG32(PA_SC_MULTI_CHIP_CNTL, 0);
1889 WREG32(VGT_NUM_INSTANCES, 0);
1890
1891 WREG32(SPI_CONFIG_CNTL, GPR_WRITE_PRIORITY(0));
1892 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(0));
1893
1894 tmp = RREG32(SQ_MS_FIFO_SIZES);
1895 if (((rdev->family) == CHIP_RV610) ||
1896 ((rdev->family) == CHIP_RV620) ||
1897 ((rdev->family) == CHIP_RS780) ||
1898 ((rdev->family) == CHIP_RS880)) {
1899 tmp = (CACHE_FIFO_SIZE(0xa) |
1900 FETCH_FIFO_HIWATER(0xa) |
1901 DONE_FIFO_HIWATER(0xe0) |
1902 ALU_UPDATE_FIFO_HIWATER(0x8));
1903 } else if (((rdev->family) == CHIP_R600) ||
1904 ((rdev->family) == CHIP_RV630)) {
1905 tmp &= ~DONE_FIFO_HIWATER(0xff);
1906 tmp |= DONE_FIFO_HIWATER(0x4);
1907 }
1908 WREG32(SQ_MS_FIFO_SIZES, tmp);
1909
1910
1911
1912
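	/* SQ_CONFIG, SQ_GPR_RESOURCE_MGMT, SQ_THREAD_RESOURCE_MGMT and
	 * SQ_STACK_RESOURCE_MGMT should be adjusted as needed by the 2D/3D
	 * drivers; this just sets default values.
	 */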
1913 sq_config = RREG32(SQ_CONFIG);
1914 sq_config &= ~(PS_PRIO(3) |
1915 VS_PRIO(3) |
1916 GS_PRIO(3) |
1917 ES_PRIO(3));
1918 sq_config |= (DX9_CONSTS |
1919 VC_ENABLE |
1920 PS_PRIO(0) |
1921 VS_PRIO(1) |
1922 GS_PRIO(2) |
1923 ES_PRIO(3));
1924
1925 if ((rdev->family) == CHIP_R600) {
1926 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(124) |
1927 NUM_VS_GPRS(124) |
1928 NUM_CLAUSE_TEMP_GPRS(4));
1929 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(0) |
1930 NUM_ES_GPRS(0));
1931 sq_thread_resource_mgmt = (NUM_PS_THREADS(136) |
1932 NUM_VS_THREADS(48) |
1933 NUM_GS_THREADS(4) |
1934 NUM_ES_THREADS(4));
1935 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(128) |
1936 NUM_VS_STACK_ENTRIES(128));
1937 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(0) |
1938 NUM_ES_STACK_ENTRIES(0));
1939 } else if (((rdev->family) == CHIP_RV610) ||
1940 ((rdev->family) == CHIP_RV620) ||
1941 ((rdev->family) == CHIP_RS780) ||
1942 ((rdev->family) == CHIP_RS880)) {
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;
1945
1946 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
1947 NUM_VS_GPRS(44) |
1948 NUM_CLAUSE_TEMP_GPRS(2));
1949 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(17) |
1950 NUM_ES_GPRS(17));
1951 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
1952 NUM_VS_THREADS(78) |
1953 NUM_GS_THREADS(4) |
1954 NUM_ES_THREADS(31));
1955 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(40) |
1956 NUM_VS_STACK_ENTRIES(40));
1957 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(32) |
1958 NUM_ES_STACK_ENTRIES(16));
1959 } else if (((rdev->family) == CHIP_RV630) ||
1960 ((rdev->family) == CHIP_RV635)) {
1961 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
1962 NUM_VS_GPRS(44) |
1963 NUM_CLAUSE_TEMP_GPRS(2));
1964 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(18) |
1965 NUM_ES_GPRS(18));
1966 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
1967 NUM_VS_THREADS(78) |
1968 NUM_GS_THREADS(4) |
1969 NUM_ES_THREADS(31));
1970 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(40) |
1971 NUM_VS_STACK_ENTRIES(40));
1972 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(32) |
1973 NUM_ES_STACK_ENTRIES(16));
1974 } else if ((rdev->family) == CHIP_RV670) {
1975 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
1976 NUM_VS_GPRS(44) |
1977 NUM_CLAUSE_TEMP_GPRS(2));
1978 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(17) |
1979 NUM_ES_GPRS(17));
1980 sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
1981 NUM_VS_THREADS(78) |
1982 NUM_GS_THREADS(4) |
1983 NUM_ES_THREADS(31));
1984 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(64) |
1985 NUM_VS_STACK_ENTRIES(64));
1986 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(64) |
1987 NUM_ES_STACK_ENTRIES(64));
1988 }
1989
1990 WREG32(SQ_CONFIG, sq_config);
1991 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
1992 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
1993 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
1994 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
1995 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
1996
1997 if (((rdev->family) == CHIP_RV610) ||
1998 ((rdev->family) == CHIP_RV620) ||
1999 ((rdev->family) == CHIP_RS780) ||
2000 ((rdev->family) == CHIP_RS880)) {
2001 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(TC_ONLY));
2002 } else {
2003 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(VC_AND_TC));
2004 }
2005
2006
2007 WREG32(PA_SC_AA_SAMPLE_LOCS_2S, (S0_X(0xc) | S0_Y(0x4) |
2008 S1_X(0x4) | S1_Y(0xc)));
2009 WREG32(PA_SC_AA_SAMPLE_LOCS_4S, (S0_X(0xe) | S0_Y(0xe) |
2010 S1_X(0x2) | S1_Y(0x2) |
2011 S2_X(0xa) | S2_Y(0x6) |
2012 S3_X(0x6) | S3_Y(0xa)));
2013 WREG32(PA_SC_AA_SAMPLE_LOCS_8S_WD0, (S0_X(0xe) | S0_Y(0xb) |
2014 S1_X(0x4) | S1_Y(0xc) |
2015 S2_X(0x1) | S2_Y(0x6) |
2016 S3_X(0xa) | S3_Y(0xe)));
2017 WREG32(PA_SC_AA_SAMPLE_LOCS_8S_WD1, (S4_X(0x6) | S4_Y(0x1) |
2018 S5_X(0x0) | S5_Y(0x0) |
2019 S6_X(0xb) | S6_Y(0x4) |
2020 S7_X(0x7) | S7_Y(0x8)));
2021
2022 WREG32(VGT_STRMOUT_EN, 0);
2023 tmp = rdev->config.r600.max_pipes * 16;
2024 switch (rdev->family) {
2025 case CHIP_RV610:
2026 case CHIP_RV620:
2027 case CHIP_RS780:
2028 case CHIP_RS880:
2029 tmp += 32;
2030 break;
2031 case CHIP_RV670:
2032 tmp += 128;
2033 break;
2034 default:
2035 break;
2036 }
2037 if (tmp > 256) {
2038 tmp = 256;
2039 }
2040 WREG32(VGT_ES_PER_GS, 128);
2041 WREG32(VGT_GS_PER_ES, tmp);
2042 WREG32(VGT_GS_PER_VS, 2);
2043 WREG32(VGT_GS_VERTEX_REUSE, 16);
2044
2045
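	/* More default values. 2D/3D driver should adjust as needed */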
2046 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
2047 WREG32(VGT_STRMOUT_EN, 0);
2048 WREG32(SX_MISC, 0);
2049 WREG32(PA_SC_MODE_CNTL, 0);
2050 WREG32(PA_SC_AA_CONFIG, 0);
2051 WREG32(PA_SC_LINE_STIPPLE, 0);
2052 WREG32(SPI_INPUT_Z, 0);
2053 WREG32(SPI_PS_IN_CONTROL_0, NUM_INTERP(2));
2054 WREG32(CB_COLOR7_FRAG, 0);
2055
2056
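	/* Clear render buffer base addresses */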
2057 WREG32(CB_COLOR0_BASE, 0);
2058 WREG32(CB_COLOR1_BASE, 0);
2059 WREG32(CB_COLOR2_BASE, 0);
2060 WREG32(CB_COLOR3_BASE, 0);
2061 WREG32(CB_COLOR4_BASE, 0);
2062 WREG32(CB_COLOR5_BASE, 0);
2063 WREG32(CB_COLOR6_BASE, 0);
2064 WREG32(CB_COLOR7_BASE, 0);
2065 WREG32(CB_COLOR7_FRAG, 0);
2066
2067 switch (rdev->family) {
2068 case CHIP_RV610:
2069 case CHIP_RV620:
2070 case CHIP_RS780:
2071 case CHIP_RS880:
2072 tmp = TC_L2_SIZE(8);
2073 break;
2074 case CHIP_RV630:
2075 case CHIP_RV635:
2076 tmp = TC_L2_SIZE(4);
2077 break;
2078 case CHIP_R600:
2079 tmp = TC_L2_SIZE(0) | L2_DISABLE_LATE_HIT;
2080 break;
2081 default:
2082 tmp = TC_L2_SIZE(0);
2083 break;
2084 }
2085 WREG32(TC_CNTL, tmp);
2086
2087 tmp = RREG32(HDP_HOST_PATH_CNTL);
2088 WREG32(HDP_HOST_PATH_CNTL, tmp);
2089
2090 tmp = RREG32(ARB_POP);
2091 tmp |= ENABLE_TC128;
2092 WREG32(ARB_POP, tmp);
2093
2094 WREG32(PA_SC_MULTI_CHIP_CNTL, 0);
2095 WREG32(PA_CL_ENHANCE, (CLIP_VTX_REORDER_ENA |
2096 NUM_CLIP_SEQ(3)));
2097 WREG32(PA_SC_ENHANCE, FORCE_EOV_MAX_CLK_CNT(4095));
2098 WREG32(VC_ENHANCE, 0);
2099}
2100
2104
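/*
 * Indirect registers accessor
 */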
2105u32 r600_pciep_rreg(struct radeon_device *rdev, u32 reg)
2106{
2107 unsigned long flags;
2108 u32 r;
2109
2110 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
2111 WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
2112 (void)RREG32(PCIE_PORT_INDEX);
2113 r = RREG32(PCIE_PORT_DATA);
2114 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
2115 return r;
2116}
2117
2118void r600_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
2119{
2120 unsigned long flags;
2121
2122 spin_lock_irqsave(&rdev->pciep_idx_lock, flags);
2123 WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
2124 (void)RREG32(PCIE_PORT_INDEX);
2125 WREG32(PCIE_PORT_DATA, (v));
2126 (void)RREG32(PCIE_PORT_DATA);
2127 spin_unlock_irqrestore(&rdev->pciep_idx_lock, flags);
2128}
2129
2132
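/*
 * CP & Ring
 */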
2133void r600_cp_stop(struct radeon_device *rdev)
2134{
2135 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
2136 WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
2137 WREG32(SCRATCH_UMSK, 0);
2138 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
2139}
2140
2141int r600_init_microcode(struct radeon_device *rdev)
2142{
2143 const char *chip_name;
2144 const char *rlc_chip_name;
2145 const char *smc_chip_name = "RV770";
2146 size_t pfp_req_size, me_req_size, rlc_req_size, smc_req_size = 0;
2147 char fw_name[30];
2148 int err;
2149
2150 DRM_DEBUG("\n");
2151
2152 switch (rdev->family) {
2153 case CHIP_R600:
2154 chip_name = "R600";
2155 rlc_chip_name = "R600";
2156 break;
2157 case CHIP_RV610:
2158 chip_name = "RV610";
2159 rlc_chip_name = "R600";
2160 break;
2161 case CHIP_RV630:
2162 chip_name = "RV630";
2163 rlc_chip_name = "R600";
2164 break;
2165 case CHIP_RV620:
2166 chip_name = "RV620";
2167 rlc_chip_name = "R600";
2168 break;
2169 case CHIP_RV635:
2170 chip_name = "RV635";
2171 rlc_chip_name = "R600";
2172 break;
2173 case CHIP_RV670:
2174 chip_name = "RV670";
2175 rlc_chip_name = "R600";
2176 break;
2177 case CHIP_RS780:
2178 case CHIP_RS880:
2179 chip_name = "RS780";
2180 rlc_chip_name = "R600";
2181 break;
2182 case CHIP_RV770:
2183 chip_name = "RV770";
2184 rlc_chip_name = "R700";
2185 smc_chip_name = "RV770";
2186 smc_req_size = ALIGN(RV770_SMC_UCODE_SIZE, 4);
2187 break;
2188 case CHIP_RV730:
2189 chip_name = "RV730";
2190 rlc_chip_name = "R700";
2191 smc_chip_name = "RV730";
2192 smc_req_size = ALIGN(RV730_SMC_UCODE_SIZE, 4);
2193 break;
2194 case CHIP_RV710:
2195 chip_name = "RV710";
2196 rlc_chip_name = "R700";
2197 smc_chip_name = "RV710";
2198 smc_req_size = ALIGN(RV710_SMC_UCODE_SIZE, 4);
2199 break;
2200 case CHIP_RV740:
2201 chip_name = "RV730";
2202 rlc_chip_name = "R700";
2203 smc_chip_name = "RV740";
2204 smc_req_size = ALIGN(RV740_SMC_UCODE_SIZE, 4);
2205 break;
2206 case CHIP_CEDAR:
2207 chip_name = "CEDAR";
2208 rlc_chip_name = "CEDAR";
2209 smc_chip_name = "CEDAR";
2210 smc_req_size = ALIGN(CEDAR_SMC_UCODE_SIZE, 4);
2211 break;
2212 case CHIP_REDWOOD:
2213 chip_name = "REDWOOD";
2214 rlc_chip_name = "REDWOOD";
2215 smc_chip_name = "REDWOOD";
2216 smc_req_size = ALIGN(REDWOOD_SMC_UCODE_SIZE, 4);
2217 break;
2218 case CHIP_JUNIPER:
2219 chip_name = "JUNIPER";
2220 rlc_chip_name = "JUNIPER";
2221 smc_chip_name = "JUNIPER";
2222 smc_req_size = ALIGN(JUNIPER_SMC_UCODE_SIZE, 4);
2223 break;
2224 case CHIP_CYPRESS:
2225 case CHIP_HEMLOCK:
2226 chip_name = "CYPRESS";
2227 rlc_chip_name = "CYPRESS";
2228 smc_chip_name = "CYPRESS";
2229 smc_req_size = ALIGN(CYPRESS_SMC_UCODE_SIZE, 4);
2230 break;
2231 case CHIP_PALM:
2232 chip_name = "PALM";
2233 rlc_chip_name = "SUMO";
2234 break;
2235 case CHIP_SUMO:
2236 chip_name = "SUMO";
2237 rlc_chip_name = "SUMO";
2238 break;
2239 case CHIP_SUMO2:
2240 chip_name = "SUMO2";
2241 rlc_chip_name = "SUMO";
2242 break;
2243 default: BUG();
2244 }
2245
2246 if (rdev->family >= CHIP_CEDAR) {
2247 pfp_req_size = EVERGREEN_PFP_UCODE_SIZE * 4;
2248 me_req_size = EVERGREEN_PM4_UCODE_SIZE * 4;
2249 rlc_req_size = EVERGREEN_RLC_UCODE_SIZE * 4;
2250 } else if (rdev->family >= CHIP_RV770) {
2251 pfp_req_size = R700_PFP_UCODE_SIZE * 4;
2252 me_req_size = R700_PM4_UCODE_SIZE * 4;
2253 rlc_req_size = R700_RLC_UCODE_SIZE * 4;
2254 } else {
2255 pfp_req_size = R600_PFP_UCODE_SIZE * 4;
2256 me_req_size = R600_PM4_UCODE_SIZE * 12;
2257 rlc_req_size = R600_RLC_UCODE_SIZE * 4;
2258 }
2259
2260 DRM_INFO("Loading %s Microcode\n", chip_name);
2261
2262 snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
2263 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
2264 if (err)
2265 goto out;
2266 if (rdev->pfp_fw->size != pfp_req_size) {
2267 printk(KERN_ERR
2268 "r600_cp: Bogus length %zu in firmware \"%s\"\n",
2269 rdev->pfp_fw->size, fw_name);
2270 err = -EINVAL;
2271 goto out;
2272 }
2273
2274 snprintf(fw_name, sizeof(fw_name), "radeon/%s_me.bin", chip_name);
2275 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
2276 if (err)
2277 goto out;
2278 if (rdev->me_fw->size != me_req_size) {
2279 printk(KERN_ERR
2280 "r600_cp: Bogus length %zu in firmware \"%s\"\n",
2281 rdev->me_fw->size, fw_name);
		err = -EINVAL;
		goto out;
	}
2284
2285 snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
2286 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
2287 if (err)
2288 goto out;
2289 if (rdev->rlc_fw->size != rlc_req_size) {
2290 printk(KERN_ERR
2291 "r600_rlc: Bogus length %zu in firmware \"%s\"\n",
2292 rdev->rlc_fw->size, fw_name);
		err = -EINVAL;
		goto out;
	}
2295
2296 if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_HEMLOCK)) {
2297 snprintf(fw_name, sizeof(fw_name), "radeon/%s_smc.bin", smc_chip_name);
2298 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
2299 if (err) {
2300 printk(KERN_ERR
2301 "smc: error loading firmware \"%s\"\n",
2302 fw_name);
2303 release_firmware(rdev->smc_fw);
2304 rdev->smc_fw = NULL;
2305 err = 0;
2306 } else if (rdev->smc_fw->size != smc_req_size) {
2307 printk(KERN_ERR
2308 "smc: Bogus length %zu in firmware \"%s\"\n",
2309 rdev->smc_fw->size, fw_name);
2310 err = -EINVAL;
2311 }
2312 }
2313
2314out:
2315 if (err) {
2316 if (err != -EINVAL)
2317 printk(KERN_ERR
2318 "r600_cp: Failed to load firmware \"%s\"\n",
2319 fw_name);
2320 release_firmware(rdev->pfp_fw);
2321 rdev->pfp_fw = NULL;
2322 release_firmware(rdev->me_fw);
2323 rdev->me_fw = NULL;
2324 release_firmware(rdev->rlc_fw);
2325 rdev->rlc_fw = NULL;
2326 release_firmware(rdev->smc_fw);
2327 rdev->smc_fw = NULL;
2328 }
2329 return err;
2330}
2331
2332static int r600_cp_load_microcode(struct radeon_device *rdev)
2333{
2334 const __be32 *fw_data;
2335 int i;
2336
2337 if (!rdev->me_fw || !rdev->pfp_fw)
2338 return -EINVAL;
2339
2340 r600_cp_stop(rdev);
2341
2342 WREG32(CP_RB_CNTL,
2343#ifdef __BIG_ENDIAN
2344 BUF_SWAP_32BIT |
2345#endif
2346 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2347
2348
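	/* Reset cp */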
2349 WREG32(GRBM_SOFT_RESET, SOFT_RESET_CP);
2350 RREG32(GRBM_SOFT_RESET);
2351 mdelay(15);
2352 WREG32(GRBM_SOFT_RESET, 0);
2353
2354 WREG32(CP_ME_RAM_WADDR, 0);
2355
2356 fw_data = (const __be32 *)rdev->me_fw->data;
2357 WREG32(CP_ME_RAM_WADDR, 0);
2358 for (i = 0; i < R600_PM4_UCODE_SIZE * 3; i++)
2359 WREG32(CP_ME_RAM_DATA,
2360 be32_to_cpup(fw_data++));
2361
2362 fw_data = (const __be32 *)rdev->pfp_fw->data;
2363 WREG32(CP_PFP_UCODE_ADDR, 0);
2364 for (i = 0; i < R600_PFP_UCODE_SIZE; i++)
2365 WREG32(CP_PFP_UCODE_DATA,
2366 be32_to_cpup(fw_data++));
2367
2368 WREG32(CP_PFP_UCODE_ADDR, 0);
2369 WREG32(CP_ME_RAM_WADDR, 0);
2370 WREG32(CP_ME_RAM_RADDR, 0);
2371 return 0;
2372}
2373
2374int r600_cp_start(struct radeon_device *rdev)
2375{
2376 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2377 int r;
2378 uint32_t cp_me;
2379
2380 r = radeon_ring_lock(rdev, ring, 7);
2381 if (r) {
2382 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2383 return r;
2384 }
2385 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2386 radeon_ring_write(ring, 0x1);
2387 if (rdev->family >= CHIP_RV770) {
2388 radeon_ring_write(ring, 0x0);
2389 radeon_ring_write(ring, rdev->config.rv770.max_hw_contexts - 1);
2390 } else {
2391 radeon_ring_write(ring, 0x3);
2392 radeon_ring_write(ring, rdev->config.r600.max_hw_contexts - 1);
2393 }
2394 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2395 radeon_ring_write(ring, 0);
2396 radeon_ring_write(ring, 0);
2397 radeon_ring_unlock_commit(rdev, ring);
2398
2399 cp_me = 0xff;
2400 WREG32(R_0086D8_CP_ME_CNTL, cp_me);
2401 return 0;
2402}
2403
2404int r600_cp_resume(struct radeon_device *rdev)
2405{
2406 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2407 u32 tmp;
2408 u32 rb_bufsz;
2409 int r;
2410
2411
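	/* Reset cp */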
2412 WREG32(GRBM_SOFT_RESET, SOFT_RESET_CP);
2413 RREG32(GRBM_SOFT_RESET);
2414 mdelay(15);
2415 WREG32(GRBM_SOFT_RESET, 0);
2416
2417
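	/* Set ring buffer size */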
2418 rb_bufsz = order_base_2(ring->ring_size / 8);
2419 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2420#ifdef __BIG_ENDIAN
2421 tmp |= BUF_SWAP_32BIT;
2422#endif
2423 WREG32(CP_RB_CNTL, tmp);
2424 WREG32(CP_SEM_WAIT_TIMER, 0x0);
2425
2426
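	/* Set the write pointer delay */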
2427 WREG32(CP_RB_WPTR_DELAY, 0);
2428
2429
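	/* Initialize the ring buffer's read and write pointers */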
2430 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2431 WREG32(CP_RB_RPTR_WR, 0);
2432 ring->wptr = 0;
2433 WREG32(CP_RB_WPTR, ring->wptr);
2434
2435
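	/* set the wb address whether it's enabled or not */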
2436 WREG32(CP_RB_RPTR_ADDR,
2437 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2438 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2439 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2440
2441 if (rdev->wb.enabled)
2442 WREG32(SCRATCH_UMSK, 0xff);
2443 else {
2444 tmp |= RB_NO_UPDATE;
2445 WREG32(SCRATCH_UMSK, 0);
2446 }
2447
2448 mdelay(1);
2449 WREG32(CP_RB_CNTL, tmp);
2450
2451 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2452 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2453
2454 ring->rptr = RREG32(CP_RB_RPTR);
2455
2456 r600_cp_start(rdev);
2457 ring->ready = true;
2458 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2459 if (r) {
2460 ring->ready = false;
2461 return r;
2462 }
2463 return 0;
2464}
2465
2466void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size)
2467{
2468 u32 rb_bufsz;
2469 int r;
2470
2471
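	/* Align ring size */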
2472 rb_bufsz = order_base_2(ring_size / 8);
2473 ring_size = (1 << (rb_bufsz + 1)) * 4;
2474 ring->ring_size = ring_size;
2475 ring->align_mask = 16 - 1;
2476
2477 if (radeon_ring_supports_scratch_reg(rdev, ring)) {
2478 r = radeon_scratch_get(rdev, &ring->rptr_save_reg);
2479 if (r) {
2480 DRM_ERROR("failed to get scratch reg for rptr save (%d).\n", r);
2481 ring->rptr_save_reg = 0;
2482 }
2483 }
2484}
2485
2486void r600_cp_fini(struct radeon_device *rdev)
2487{
2488 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2489 r600_cp_stop(rdev);
2490 radeon_ring_fini(rdev, ring);
2491 radeon_scratch_free(rdev, ring->rptr_save_reg);
2492}
2493
2496
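/*
 * GPU scratch registers helpers function.
 */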
2497void r600_scratch_init(struct radeon_device *rdev)
2498{
2499 int i;
2500
2501 rdev->scratch.num_reg = 7;
2502 rdev->scratch.reg_base = SCRATCH_REG0;
2503 for (i = 0; i < rdev->scratch.num_reg; i++) {
2504 rdev->scratch.free[i] = true;
2505 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
2506 }
2507}
2508
2509int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
2510{
2511 uint32_t scratch;
2512 uint32_t tmp = 0;
2513 unsigned i;
2514 int r;
2515
2516 r = radeon_scratch_get(rdev, &scratch);
2517 if (r) {
2518 DRM_ERROR("radeon: cp failed to get scratch reg (%d).\n", r);
2519 return r;
2520 }
2521 WREG32(scratch, 0xCAFEDEAD);
2522 r = radeon_ring_lock(rdev, ring, 3);
2523 if (r) {
2524 DRM_ERROR("radeon: cp failed to lock ring %d (%d).\n", ring->idx, r);
2525 radeon_scratch_free(rdev, scratch);
2526 return r;
2527 }
2528 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2529 radeon_ring_write(ring, ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
2530 radeon_ring_write(ring, 0xDEADBEEF);
2531 radeon_ring_unlock_commit(rdev, ring);
2532 for (i = 0; i < rdev->usec_timeout; i++) {
2533 tmp = RREG32(scratch);
2534 if (tmp == 0xDEADBEEF)
2535 break;
2536 DRM_UDELAY(1);
2537 }
2538 if (i < rdev->usec_timeout) {
2539 DRM_INFO("ring test on %d succeeded in %d usecs\n", ring->idx, i);
2540 } else {
2541 DRM_ERROR("radeon: ring %d test failed (scratch(0x%04X)=0x%08X)\n",
2542 ring->idx, scratch, tmp);
2543 r = -EINVAL;
2544 }
2545 radeon_scratch_free(rdev, scratch);
2546 return r;
2547}
2548
2552
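/*
 * CP fences/semaphores
 */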
2553void r600_fence_ring_emit(struct radeon_device *rdev,
2554 struct radeon_fence *fence)
2555{
2556 struct radeon_ring *ring = &rdev->ring[fence->ring];
2557
2558 if (rdev->wb.use_event) {
2559 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
2560
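		/* flush read cache over gart */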
2561 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
2562 radeon_ring_write(ring, PACKET3_TC_ACTION_ENA |
2563 PACKET3_VC_ACTION_ENA |
2564 PACKET3_SH_ACTION_ENA);
2565 radeon_ring_write(ring, 0xFFFFFFFF);
2566 radeon_ring_write(ring, 0);
2567 radeon_ring_write(ring, 10);
2568
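		/* EVENT_WRITE_EOP - flush caches, send int */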
2569 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
2570 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT_TS) | EVENT_INDEX(5));
2571 radeon_ring_write(ring, addr & 0xffffffff);
2572 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
2573 radeon_ring_write(ring, fence->seq);
2574 radeon_ring_write(ring, 0);
2575 } else {
2576
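		/* flush read cache over gart */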
2577 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
2578 radeon_ring_write(ring, PACKET3_TC_ACTION_ENA |
2579 PACKET3_VC_ACTION_ENA |
2580 PACKET3_SH_ACTION_ENA);
2581 radeon_ring_write(ring, 0xFFFFFFFF);
2582 radeon_ring_write(ring, 0);
2583 radeon_ring_write(ring, 10);
2584 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
2585 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT) | EVENT_INDEX(0));
2586
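		/* wait for 3D idle clean */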
2587 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2588 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
2589 radeon_ring_write(ring, WAIT_3D_IDLE_bit | WAIT_3D_IDLECLEAN_bit);
2590
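		/* Emit fence sequence & fire IRQ */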
2591 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2592 radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
2593 radeon_ring_write(ring, fence->seq);
2594
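		/* CP_INTERRUPT packet 3 no longer exists, use packet 0 */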
2595 radeon_ring_write(ring, PACKET0(CP_INT_STATUS, 0));
2596 radeon_ring_write(ring, RB_INT_STAT);
2597 }
2598}
2599
2600void r600_semaphore_ring_emit(struct radeon_device *rdev,
2601 struct radeon_ring *ring,
2602 struct radeon_semaphore *semaphore,
2603 bool emit_wait)
2604{
2605 uint64_t addr = semaphore->gpu_addr;
2606 unsigned sel = emit_wait ? PACKET3_SEM_SEL_WAIT : PACKET3_SEM_SEL_SIGNAL;
2607
2608 if (rdev->family < CHIP_CAYMAN)
2609 sel |= PACKET3_SEM_WAIT_ON_SIGNAL;
2610
2611 radeon_ring_write(ring, PACKET3(PACKET3_MEM_SEMAPHORE, 1));
2612 radeon_ring_write(ring, addr & 0xffffffff);
2613 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | sel);
2614}
2615
2628
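/**
 * r600_copy_cpdma - copy pages using the CP DMA engine
 *
 * @rdev: radeon_device pointer
 * @src_offset: src GPU address
 * @dst_offset: dst GPU address
 * @num_gpu_pages: number of GPU pages to xfer
 * @fence: radeon fence object
 *
 * Copy GPU paging using the CP DMA engine (r6xx+).
 * Used by the radeon ttm implementation to move pages if
 * registered as the asic copy callback.
 */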
2629int r600_copy_cpdma(struct radeon_device *rdev,
2630 uint64_t src_offset, uint64_t dst_offset,
2631 unsigned num_gpu_pages,
2632 struct radeon_fence **fence)
2633{
2634 struct radeon_semaphore *sem = NULL;
2635 int ring_index = rdev->asic->copy.blit_ring_index;
2636 struct radeon_ring *ring = &rdev->ring[ring_index];
2637 u32 size_in_bytes, cur_size_in_bytes, tmp;
2638 int i, num_loops;
2639 int r = 0;
2640
2641 r = radeon_semaphore_create(rdev, &sem);
2642 if (r) {
2643 DRM_ERROR("radeon: moving bo (%d).\n", r);
2644 return r;
2645 }
2646
2647 size_in_bytes = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT);
2648 num_loops = DIV_ROUND_UP(size_in_bytes, 0x1fffff);
2649 r = radeon_ring_lock(rdev, ring, num_loops * 6 + 24);
2650 if (r) {
2651 DRM_ERROR("radeon: moving bo (%d).\n", r);
2652 radeon_semaphore_free(rdev, &sem, NULL);
2653 return r;
2654 }
2655
2656 if (radeon_fence_need_sync(*fence, ring->idx)) {
2657 radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
2658 ring->idx);
2659 radeon_fence_note_sync(*fence, ring->idx);
2660 } else {
2661 radeon_semaphore_free(rdev, &sem, NULL);
2662 }
2663
2664 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2665 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
2666 radeon_ring_write(ring, WAIT_3D_IDLE_bit);
2667 for (i = 0; i < num_loops; i++) {
2668 cur_size_in_bytes = size_in_bytes;
2669 if (cur_size_in_bytes > 0x1fffff)
2670 cur_size_in_bytes = 0x1fffff;
2671 size_in_bytes -= cur_size_in_bytes;
2672 tmp = upper_32_bits(src_offset) & 0xff;
2673 if (size_in_bytes == 0)
2674 tmp |= PACKET3_CP_DMA_CP_SYNC;
2675 radeon_ring_write(ring, PACKET3(PACKET3_CP_DMA, 4));
2676 radeon_ring_write(ring, src_offset & 0xffffffff);
2677 radeon_ring_write(ring, tmp);
2678 radeon_ring_write(ring, dst_offset & 0xffffffff);
2679 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
2680 radeon_ring_write(ring, cur_size_in_bytes);
2681 src_offset += cur_size_in_bytes;
2682 dst_offset += cur_size_in_bytes;
2683 }
2684 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2685 radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
2686 radeon_ring_write(ring, WAIT_CP_DMA_IDLE_bit);
2687
2688 r = radeon_fence_emit(rdev, fence, ring->idx);
2689 if (r) {
2690 radeon_ring_unlock_undo(rdev, ring);
2691 return r;
2692 }
2693
2694 radeon_ring_unlock_commit(rdev, ring);
2695 radeon_semaphore_free(rdev, &sem, *fence);
2696
2697 return r;
2698}
2699
2700int r600_set_surface_reg(struct radeon_device *rdev, int reg,
2701 uint32_t tiling_flags, uint32_t pitch,
2702 uint32_t offset, uint32_t obj_size)
2703{
2704
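	/* FIXME: implement */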
2705 return 0;
2706}
2707
2708void r600_clear_surface_reg(struct radeon_device *rdev, int reg)
2709{
2710
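	/* FIXME: implement */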
2711}
2712
2713static int r600_startup(struct radeon_device *rdev)
2714{
2715 struct radeon_ring *ring;
2716 int r;
2717
2718
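	/* enable pcie gen2 link */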
2719 r600_pcie_gen2_enable(rdev);
2720
2721
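	/* scratch needs to be initialized before the mc */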
2722 r = r600_vram_scratch_init(rdev);
2723 if (r)
2724 return r;
2725
2726 r600_mc_program(rdev);
2727
2728 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
2729 r = r600_init_microcode(rdev);
2730 if (r) {
2731 DRM_ERROR("Failed to load firmware!\n");
2732 return r;
2733 }
2734 }
2735
2736 if (rdev->flags & RADEON_IS_AGP) {
2737 r600_agp_enable(rdev);
2738 } else {
2739 r = r600_pcie_gart_enable(rdev);
2740 if (r)
2741 return r;
2742 }
2743 r600_gpu_init(rdev);
2744
2745
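	/* allocate wb buffer */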
2746 r = radeon_wb_init(rdev);
2747 if (r)
2748 return r;
2749
2750 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
2751 if (r) {
2752 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
2753 return r;
2754 }
2755
2756 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
2757 if (r) {
2758 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
2759 return r;
2760 }
2761
2762
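	/* Enable IRQ */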
2763 if (!rdev->irq.installed) {
2764 r = radeon_irq_kms_init(rdev);
2765 if (r)
2766 return r;
2767 }
2768
2769 r = r600_irq_init(rdev);
2770 if (r) {
2771 DRM_ERROR("radeon: IH init failed (%d).\n", r);
2772 radeon_irq_kms_fini(rdev);
2773 return r;
2774 }
2775 r600_irq_set(rdev);
2776
2777 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2778 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
2779 R600_CP_RB_RPTR, R600_CP_RB_WPTR,
2780 RADEON_CP_PACKET2);
2781 if (r)
2782 return r;
2783
2784 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
2785 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
2786 DMA_RB_RPTR, DMA_RB_WPTR,
2787 DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0));
2788 if (r)
2789 return r;
2790
2791 r = r600_cp_load_microcode(rdev);
2792 if (r)
2793 return r;
2794 r = r600_cp_resume(rdev);
2795 if (r)
2796 return r;
2797
2798 r = r600_dma_resume(rdev);
2799 if (r)
2800 return r;
2801
2802 r = radeon_ib_pool_init(rdev);
2803 if (r) {
2804 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
2805 return r;
2806 }
2807
2808 r = r600_audio_init(rdev);
2809 if (r) {
2810 DRM_ERROR("radeon: audio init failed\n");
2811 return r;
2812 }
2813
2814 return 0;
2815}
2816
2817void r600_vga_set_state(struct radeon_device *rdev, bool state)
2818{
2819 uint32_t temp;
2820
2821 temp = RREG32(CONFIG_CNTL);
2822 if (state == false) {
2823 temp &= ~(1<<0);
2824 temp |= (1<<1);
2825 } else {
2826 temp &= ~(1<<1);
2827 }
2828 WREG32(CONFIG_CNTL, temp);
2829}
2830
2831int r600_resume(struct radeon_device *rdev)
2832{
2833 int r;
2834
2839
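	/* Do not reset GPU before posting, on r600 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */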
2840 atom_asic_init(rdev->mode_info.atom_context);
2841
2842 rdev->accel_working = true;
2843 r = r600_startup(rdev);
2844 if (r) {
2845 DRM_ERROR("r600 startup failed on resume\n");
2846 rdev->accel_working = false;
2847 return r;
2848 }
2849
2850 return r;
2851}
2852
2853int r600_suspend(struct radeon_device *rdev)
2854{
2855 r600_audio_fini(rdev);
2856 r600_cp_stop(rdev);
2857 r600_dma_stop(rdev);
2858 r600_irq_suspend(rdev);
2859 radeon_wb_disable(rdev);
2860 r600_pcie_gart_disable(rdev);
2861
2862 return 0;
2863}
2864
2870
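/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 */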
2871int r600_init(struct radeon_device *rdev)
2872{
2873 int r;
2874
2875 if (r600_debugfs_mc_info_init(rdev)) {
2876 DRM_ERROR("Failed to register debugfs file for mc !\n");
2877 }
2878
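	/* Read BIOS */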
2879 if (!radeon_get_bios(rdev)) {
2880 if (ASIC_IS_AVIVO(rdev))
2881 return -EINVAL;
2882 }
2883
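	/* Must be an ATOMBIOS */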
2884 if (!rdev->is_atom_bios) {
2885 dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
2886 return -EINVAL;
2887 }
2888 r = radeon_atombios_init(rdev);
2889 if (r)
2890 return r;
2891
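	/* Post card if necessary */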
2892 if (!radeon_card_posted(rdev)) {
2893 if (!rdev->bios) {
2894 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2895 return -EINVAL;
2896 }
2897 DRM_INFO("GPU not posted. posting now...\n");
2898 atom_asic_init(rdev->mode_info.atom_context);
2899 }
2900
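	/* Initialize scratch registers */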
2901 r600_scratch_init(rdev);
2902
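	/* Initialize surface registers */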
2903 radeon_surface_init(rdev);
2904
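	/* Initialize clocks */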
2905 radeon_get_clock_info(rdev->ddev);
2906
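	/* Fence driver */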
2907 r = radeon_fence_driver_init(rdev);
2908 if (r)
2909 return r;
2910 if (rdev->flags & RADEON_IS_AGP) {
2911 r = radeon_agp_init(rdev);
2912 if (r)
2913 radeon_agp_disable(rdev);
2914 }
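	/* initialize memory controller */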
2915 r = r600_mc_init(rdev);
2916 if (r)
2917 return r;
2918
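	/* Memory manager */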
2919 r = radeon_bo_init(rdev);
2920 if (r)
2921 return r;
2922
2923 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
2924 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
2925
2926 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
2927 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
2928
2929 rdev->ih.ring_obj = NULL;
2930 r600_ih_ring_init(rdev, 64 * 1024);
2931
2932 r = r600_pcie_gart_init(rdev);
2933 if (r)
2934 return r;
2935
2936 rdev->accel_working = true;
2937 r = r600_startup(rdev);
2938 if (r) {
2939 dev_err(rdev->dev, "disabling GPU acceleration\n");
2940 r600_cp_fini(rdev);
2941 r600_dma_fini(rdev);
2942 r600_irq_fini(rdev);
2943 radeon_wb_fini(rdev);
2944 radeon_ib_pool_fini(rdev);
2945 radeon_irq_kms_fini(rdev);
2946 r600_pcie_gart_fini(rdev);
2947 rdev->accel_working = false;
2948 }
2949
2950 return 0;
2951}
2952
2953void r600_fini(struct radeon_device *rdev)
2954{
2955 r600_audio_fini(rdev);
2956 r600_cp_fini(rdev);
2957 r600_dma_fini(rdev);
2958 r600_irq_fini(rdev);
2959 radeon_wb_fini(rdev);
2960 radeon_ib_pool_fini(rdev);
2961 radeon_irq_kms_fini(rdev);
2962 r600_pcie_gart_fini(rdev);
2963 r600_vram_scratch_fini(rdev);
2964 radeon_agp_fini(rdev);
2965 radeon_gem_fini(rdev);
2966 radeon_fence_driver_fini(rdev);
2967 radeon_bo_fini(rdev);
2968 radeon_atombios_fini(rdev);
2969 kfree(rdev->bios);
2970 rdev->bios = NULL;
2971}
2972
2976
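/*
 * CS stuff
 */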
2977void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2978{
2979 struct radeon_ring *ring = &rdev->ring[ib->ring];
2980 u32 next_rptr;
2981
2982 if (ring->rptr_save_reg) {
2983 next_rptr = ring->wptr + 3 + 4;
2984 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2985 radeon_ring_write(ring, ((ring->rptr_save_reg -
2986 PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
2987 radeon_ring_write(ring, next_rptr);
2988 } else if (rdev->wb.enabled) {
2989 next_rptr = ring->wptr + 5 + 4;
2990 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2991 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
2992 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2993 radeon_ring_write(ring, next_rptr);
2994 radeon_ring_write(ring, 0);
2995 }
2996
2997 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2998 radeon_ring_write(ring,
2999#ifdef __BIG_ENDIAN
3000 (2 << 0) |
3001#endif
3002 (ib->gpu_addr & 0xFFFFFFFC));
3003 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3004 radeon_ring_write(ring, ib->length_dw);
3005}
3006
3007int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
3008{
3009 struct radeon_ib ib;
3010 uint32_t scratch;
3011 uint32_t tmp = 0;
3012 unsigned i;
3013 int r;
3014
3015 r = radeon_scratch_get(rdev, &scratch);
3016 if (r) {
3017 DRM_ERROR("radeon: failed to get scratch reg (%d).\n", r);
3018 return r;
3019 }
3020 WREG32(scratch, 0xCAFEDEAD);
3021 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
3022 if (r) {
3023 DRM_ERROR("radeon: failed to get ib (%d).\n", r);
3024 goto free_scratch;
3025 }
3026 ib.ptr[0] = PACKET3(PACKET3_SET_CONFIG_REG, 1);
3027 ib.ptr[1] = ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
3028 ib.ptr[2] = 0xDEADBEEF;
3029 ib.length_dw = 3;
3030 r = radeon_ib_schedule(rdev, &ib, NULL);
3031 if (r) {
3032 DRM_ERROR("radeon: failed to schedule ib (%d).\n", r);
3033 goto free_ib;
3034 }
3035 r = radeon_fence_wait(ib.fence, false);
3036 if (r) {
3037 DRM_ERROR("radeon: fence wait failed (%d).\n", r);
3038 goto free_ib;
3039 }
3040 for (i = 0; i < rdev->usec_timeout; i++) {
3041 tmp = RREG32(scratch);
3042 if (tmp == 0xDEADBEEF)
3043 break;
3044 DRM_UDELAY(1);
3045 }
3046 if (i < rdev->usec_timeout) {
3047 DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
3048 } else {
3049 DRM_ERROR("radeon: ib test failed (scratch(0x%04X)=0x%08X)\n",
3050 scratch, tmp);
3051 r = -EINVAL;
3052 }
3053free_ib:
3054 radeon_ib_free(rdev, &ib);
3055free_scratch:
3056 radeon_scratch_free(rdev, scratch);
3057 return r;
3058}
3059
3070
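/*
 * Interrupts
 *
 * Interrupts use a ring buffer on r6xx/r7xx hardware.  It works much
 * like the CP ring buffer, but in reverse: the GPU writes interrupt
 * vectors into the ring and advances the write pointer, and the host
 * consumes them and updates the read pointer.
 */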
3071void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size)
3072{
3073 u32 rb_bufsz;
3074
3075
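	/* Align ring size */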
3076 rb_bufsz = order_base_2(ring_size / 4);
3077 ring_size = (1 << rb_bufsz) * 4;
3078 rdev->ih.ring_size = ring_size;
3079 rdev->ih.ptr_mask = rdev->ih.ring_size - 1;
3080 rdev->ih.rptr = 0;
3081}
3082
3083int r600_ih_ring_alloc(struct radeon_device *rdev)
3084{
3085 int r;
3086
3087
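	/* Allocate ring buffer */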
3088 if (rdev->ih.ring_obj == NULL) {
3089 r = radeon_bo_create(rdev, rdev->ih.ring_size,
3090 PAGE_SIZE, true,
3091 RADEON_GEM_DOMAIN_GTT,
3092 NULL, &rdev->ih.ring_obj);
3093 if (r) {
3094 DRM_ERROR("radeon: failed to create ih ring buffer (%d).\n", r);
3095 return r;
3096 }
3097 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
3098 if (unlikely(r != 0))
3099 return r;
3100 r = radeon_bo_pin(rdev->ih.ring_obj,
3101 RADEON_GEM_DOMAIN_GTT,
3102 &rdev->ih.gpu_addr);
3103 if (r) {
3104 radeon_bo_unreserve(rdev->ih.ring_obj);
3105 DRM_ERROR("radeon: failed to pin ih ring buffer (%d).\n", r);
3106 return r;
3107 }
3108 r = radeon_bo_kmap(rdev->ih.ring_obj,
3109 (void **)&rdev->ih.ring);
3110 radeon_bo_unreserve(rdev->ih.ring_obj);
3111 if (r) {
3112 DRM_ERROR("radeon: failed to map ih ring buffer (%d).\n", r);
3113 return r;
3114 }
3115 }
3116 return 0;
3117}
3118
3119void r600_ih_ring_fini(struct radeon_device *rdev)
3120{
3121 int r;
3122 if (rdev->ih.ring_obj) {
3123 r = radeon_bo_reserve(rdev->ih.ring_obj, false);
3124 if (likely(r == 0)) {
3125 radeon_bo_kunmap(rdev->ih.ring_obj);
3126 radeon_bo_unpin(rdev->ih.ring_obj);
3127 radeon_bo_unreserve(rdev->ih.ring_obj);
3128 }
3129 radeon_bo_unref(&rdev->ih.ring_obj);
3130 rdev->ih.ring = NULL;
3131 rdev->ih.ring_obj = NULL;
3132 }
3133}
3134
3135void r600_rlc_stop(struct radeon_device *rdev)
3136{
3137
3138 if ((rdev->family >= CHIP_RV770) &&
3139 (rdev->family <= CHIP_RV740)) {
3140
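		/* r7xx asics need to soft reset RLC before halting */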
3141 WREG32(SRBM_SOFT_RESET, SOFT_RESET_RLC);
3142 RREG32(SRBM_SOFT_RESET);
3143 mdelay(15);
3144 WREG32(SRBM_SOFT_RESET, 0);
3145 RREG32(SRBM_SOFT_RESET);
3146 }
3147
3148 WREG32(RLC_CNTL, 0);
3149}
3150
3151static void r600_rlc_start(struct radeon_device *rdev)
3152{
3153 WREG32(RLC_CNTL, RLC_ENABLE);
3154}
3155
3156static int r600_rlc_resume(struct radeon_device *rdev)
3157{
3158 u32 i;
3159 const __be32 *fw_data;
3160
3161 if (!rdev->rlc_fw)
3162 return -EINVAL;
3163
3164 r600_rlc_stop(rdev);
3165
3166 WREG32(RLC_HB_CNTL, 0);
3167
3168 WREG32(RLC_HB_BASE, 0);
3169 WREG32(RLC_HB_RPTR, 0);
3170 WREG32(RLC_HB_WPTR, 0);
3171 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
3172 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
3173 WREG32(RLC_MC_CNTL, 0);
3174 WREG32(RLC_UCODE_CNTL, 0);
3175
3176 fw_data = (const __be32 *)rdev->rlc_fw->data;
3177 if (rdev->family >= CHIP_RV770) {
3178 for (i = 0; i < R700_RLC_UCODE_SIZE; i++) {
3179 WREG32(RLC_UCODE_ADDR, i);
3180 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
3181 }
3182 } else {
3183 for (i = 0; i < R600_RLC_UCODE_SIZE; i++) {
3184 WREG32(RLC_UCODE_ADDR, i);
3185 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
3186 }
3187 }
3188 WREG32(RLC_UCODE_ADDR, 0);
3189
3190 r600_rlc_start(rdev);
3191
3192 return 0;
3193}
3194
3195static void r600_enable_interrupts(struct radeon_device *rdev)
3196{
3197 u32 ih_cntl = RREG32(IH_CNTL);
3198 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
3199
3200 ih_cntl |= ENABLE_INTR;
3201 ih_rb_cntl |= IH_RB_ENABLE;
3202 WREG32(IH_CNTL, ih_cntl);
3203 WREG32(IH_RB_CNTL, ih_rb_cntl);
3204 rdev->ih.enabled = true;
3205}
3206
3207void r600_disable_interrupts(struct radeon_device *rdev)
3208{
3209 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
3210 u32 ih_cntl = RREG32(IH_CNTL);
3211
3212 ih_rb_cntl &= ~IH_RB_ENABLE;
3213 ih_cntl &= ~ENABLE_INTR;
3214 WREG32(IH_RB_CNTL, ih_rb_cntl);
3215 WREG32(IH_CNTL, ih_cntl);
3216
3217 WREG32(IH_RB_RPTR, 0);
3218 WREG32(IH_RB_WPTR, 0);
3219 rdev->ih.enabled = false;
3220 rdev->ih.rptr = 0;
3221}
3222
3223static void r600_disable_interrupt_state(struct radeon_device *rdev)
3224{
3225 u32 tmp;
3226
3227 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3228 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3229 WREG32(DMA_CNTL, tmp);
3230 WREG32(GRBM_INT_CNTL, 0);
3231 WREG32(DxMODE_INT_MASK, 0);
3232 WREG32(D1GRPH_INTERRUPT_CONTROL, 0);
3233 WREG32(D2GRPH_INTERRUPT_CONTROL, 0);
3234 if (ASIC_IS_DCE3(rdev)) {
3235 WREG32(DCE3_DACA_AUTODETECT_INT_CONTROL, 0);
3236 WREG32(DCE3_DACB_AUTODETECT_INT_CONTROL, 0);
3237 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3238 WREG32(DC_HPD1_INT_CONTROL, tmp);
3239 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3240 WREG32(DC_HPD2_INT_CONTROL, tmp);
3241 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3242 WREG32(DC_HPD3_INT_CONTROL, tmp);
3243 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3244 WREG32(DC_HPD4_INT_CONTROL, tmp);
3245 if (ASIC_IS_DCE32(rdev)) {
3246 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3247 WREG32(DC_HPD5_INT_CONTROL, tmp);
3248 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3249 WREG32(DC_HPD6_INT_CONTROL, tmp);
3250 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3251 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, tmp);
3252 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3253 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, tmp);
3254 } else {
3255 tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3256 WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
3257 tmp = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3258 WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, tmp);
3259 }
3260 } else {
3261 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
3262 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
3263 tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3264 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
3265 tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3266 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
3267 tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
3268 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
3269 tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3270 WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
3271 tmp = RREG32(HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3272 WREG32(HDMI1_AUDIO_PACKET_CONTROL, tmp);
3273 }
3274}
3275
3276int r600_irq_init(struct radeon_device *rdev)
3277{
3278 int ret = 0;
3279 int rb_bufsz;
3280 u32 interrupt_cntl, ih_cntl, ih_rb_cntl;
3281
3282
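	/* allocate ring */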
3283 ret = r600_ih_ring_alloc(rdev);
3284 if (ret)
3285 return ret;
3286
3287
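	/* disable irqs */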
3288 r600_disable_interrupts(rdev);
3289
3290
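	/* init rlc */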
3291 if (rdev->family >= CHIP_CEDAR)
3292 ret = evergreen_rlc_resume(rdev);
3293 else
3294 ret = r600_rlc_resume(rdev);
3295 if (ret) {
3296 r600_ih_ring_fini(rdev);
3297 return ret;
3298 }
3299
3300
3301
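	/* setup interrupt control */
	/* set dummy read address to ring address */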
3302 WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8);
3303 interrupt_cntl = RREG32(INTERRUPT_CNTL);
3304
3305
3306
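	/* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
	 * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
	 */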
3307 interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
3308
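	/* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */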
3309 interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
3310 WREG32(INTERRUPT_CNTL, interrupt_cntl);
3311
3312 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
3313 rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
3314
3315 ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
3316 IH_WPTR_OVERFLOW_CLEAR |
3317 (rb_bufsz << 1));
3318
3319 if (rdev->wb.enabled)
3320 ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;
3321
3322
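	/* set the writeback address whether it's enabled or not */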
3323 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
3324 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
3325
3326 WREG32(IH_RB_CNTL, ih_rb_cntl);
3327
3328
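	/* set rptr, wptr to 0 */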
3329 WREG32(IH_RB_RPTR, 0);
3330 WREG32(IH_RB_WPTR, 0);
3331
3332
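	/* Default settings for IH_CNTL (disabled at first) */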
3333 ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10);
3334
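	/* RPTR_REARM only works if msi's are enabled */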
3335 if (rdev->msi_enabled)
3336 ih_cntl |= RPTR_REARM;
3337 WREG32(IH_CNTL, ih_cntl);
3338
3339
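	/* force the active interrupt state to all disabled */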
3340 if (rdev->family >= CHIP_CEDAR)
3341 evergreen_disable_interrupt_state(rdev);
3342 else
3343 r600_disable_interrupt_state(rdev);
3344
3345
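	/* at this point everything should be setup correctly to enable master */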
3346 pci_set_master(rdev->pdev);
3347
3348
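	/* enable irqs */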
3349 r600_enable_interrupts(rdev);
3350
3351 return ret;
3352}
3353
3354void r600_irq_suspend(struct radeon_device *rdev)
3355{
3356 r600_irq_disable(rdev);
3357 r600_rlc_stop(rdev);
3358}
3359
3360void r600_irq_fini(struct radeon_device *rdev)
3361{
3362 r600_irq_suspend(rdev);
3363 r600_ih_ring_fini(rdev);
3364}
3365
3366int r600_irq_set(struct radeon_device *rdev)
3367{
3368 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
3369 u32 mode_int = 0;
3370 u32 hpd1, hpd2, hpd3, hpd4 = 0, hpd5 = 0, hpd6 = 0;
3371 u32 grbm_int_cntl = 0;
3372 u32 hdmi0, hdmi1;
3373 u32 d1grph = 0, d2grph = 0;
3374 u32 dma_cntl;
3375 u32 thermal_int = 0;
3376
3377 if (!rdev->irq.installed) {
3378 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
3379 return -EINVAL;
3380 }
3381
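	/* don't enable anything if the ih is disabled */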
3382 if (!rdev->ih.enabled) {
3383 r600_disable_interrupts(rdev);
3384
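		/* force the active interrupt state to all disabled */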
3385 r600_disable_interrupt_state(rdev);
3386 return 0;
3387 }
3388
3389 if (ASIC_IS_DCE3(rdev)) {
3390 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
3391 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
3392 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
3393 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
3394 if (ASIC_IS_DCE32(rdev)) {
3395 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
3396 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
3397 hdmi0 = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3398 hdmi1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3399 } else {
3400 hdmi0 = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3401 hdmi1 = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3402 }
3403 } else {
3404 hpd1 = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & ~DC_HPDx_INT_EN;
3405 hpd2 = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & ~DC_HPDx_INT_EN;
3406 hpd3 = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & ~DC_HPDx_INT_EN;
3407 hdmi0 = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3408 hdmi1 = RREG32(HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
3409 }
3410
3411 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3412
3413 if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) {
3414 thermal_int = RREG32(CG_THERMAL_INT) &
3415 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
3416 } else if (rdev->family >= CHIP_RV770) {
3417 thermal_int = RREG32(RV770_CG_THERMAL_INT) &
3418 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
3419 }
3420 if (rdev->irq.dpm_thermal) {
3421 DRM_DEBUG("dpm thermal\n");
3422 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
3423 }
3424
3425 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
3426 DRM_DEBUG("r600_irq_set: sw int\n");
3427 cp_int_cntl |= RB_INT_ENABLE;
3428 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
3429 }
3430
3431 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
3432 DRM_DEBUG("r600_irq_set: sw int dma\n");
3433 dma_cntl |= TRAP_ENABLE;
3434 }
3435
3436 if (rdev->irq.crtc_vblank_int[0] ||
3437 atomic_read(&rdev->irq.pflip[0])) {
3438 DRM_DEBUG("r600_irq_set: vblank 0\n");
3439 mode_int |= D1MODE_VBLANK_INT_MASK;
3440 }
3441 if (rdev->irq.crtc_vblank_int[1] ||
3442 atomic_read(&rdev->irq.pflip[1])) {
3443 DRM_DEBUG("r600_irq_set: vblank 1\n");
3444 mode_int |= D2MODE_VBLANK_INT_MASK;
3445 }
3446 if (rdev->irq.hpd[0]) {
3447 DRM_DEBUG("r600_irq_set: hpd 1\n");
3448 hpd1 |= DC_HPDx_INT_EN;
3449 }
3450 if (rdev->irq.hpd[1]) {
3451 DRM_DEBUG("r600_irq_set: hpd 2\n");
3452 hpd2 |= DC_HPDx_INT_EN;
3453 }
3454 if (rdev->irq.hpd[2]) {
3455 DRM_DEBUG("r600_irq_set: hpd 3\n");
3456 hpd3 |= DC_HPDx_INT_EN;
3457 }
3458 if (rdev->irq.hpd[3]) {
3459 DRM_DEBUG("r600_irq_set: hpd 4\n");
3460 hpd4 |= DC_HPDx_INT_EN;
3461 }
3462 if (rdev->irq.hpd[4]) {
3463 DRM_DEBUG("r600_irq_set: hpd 5\n");
3464 hpd5 |= DC_HPDx_INT_EN;
3465 }
3466 if (rdev->irq.hpd[5]) {
3467 DRM_DEBUG("r600_irq_set: hpd 6\n");
3468 hpd6 |= DC_HPDx_INT_EN;
3469 }
3470 if (rdev->irq.afmt[0]) {
3471 DRM_DEBUG("r600_irq_set: hdmi 0\n");
3472 hdmi0 |= HDMI0_AZ_FORMAT_WTRIG_MASK;
3473 }
3474 if (rdev->irq.afmt[1]) {
		DRM_DEBUG("r600_irq_set: hdmi 1\n");
3476 hdmi1 |= HDMI0_AZ_FORMAT_WTRIG_MASK;
3477 }
3478
3479 WREG32(CP_INT_CNTL, cp_int_cntl);
3480 WREG32(DMA_CNTL, dma_cntl);
3481 WREG32(DxMODE_INT_MASK, mode_int);
3482 WREG32(D1GRPH_INTERRUPT_CONTROL, d1grph);
3483 WREG32(D2GRPH_INTERRUPT_CONTROL, d2grph);
3484 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
3485 if (ASIC_IS_DCE3(rdev)) {
3486 WREG32(DC_HPD1_INT_CONTROL, hpd1);
3487 WREG32(DC_HPD2_INT_CONTROL, hpd2);
3488 WREG32(DC_HPD3_INT_CONTROL, hpd3);
3489 WREG32(DC_HPD4_INT_CONTROL, hpd4);
3490 if (ASIC_IS_DCE32(rdev)) {
3491 WREG32(DC_HPD5_INT_CONTROL, hpd5);
3492 WREG32(DC_HPD6_INT_CONTROL, hpd6);
3493 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, hdmi0);
3494 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, hdmi1);
3495 } else {
3496 WREG32(HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
3497 WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, hdmi1);
3498 }
3499 } else {
3500 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, hpd1);
3501 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, hpd2);
3502 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, hpd3);
3503 WREG32(HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
3504 WREG32(HDMI1_AUDIO_PACKET_CONTROL, hdmi1);
3505 }
3506 if ((rdev->family > CHIP_R600) && (rdev->family < CHIP_RV770)) {
3507 WREG32(CG_THERMAL_INT, thermal_int);
3508 } else if (rdev->family >= CHIP_RV770) {
3509 WREG32(RV770_CG_THERMAL_INT, thermal_int);
3510 }
3511
3512 return 0;
3513}
3514
3515static void r600_irq_ack(struct radeon_device *rdev)
3516{
3517 u32 tmp;
3518
3519 if (ASIC_IS_DCE3(rdev)) {
3520 rdev->irq.stat_regs.r600.disp_int = RREG32(DCE3_DISP_INTERRUPT_STATUS);
3521 rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE);
3522 rdev->irq.stat_regs.r600.disp_int_cont2 = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE2);
3523 if (ASIC_IS_DCE32(rdev)) {
3524 rdev->irq.stat_regs.r600.hdmi0_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET0);
3525 rdev->irq.stat_regs.r600.hdmi1_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET1);
3526 } else {
3527 rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
3528 rdev->irq.stat_regs.r600.hdmi1_status = RREG32(DCE3_HDMI1_STATUS);
3529 }
3530 } else {
3531 rdev->irq.stat_regs.r600.disp_int = RREG32(DISP_INTERRUPT_STATUS);
3532 rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
3533 rdev->irq.stat_regs.r600.disp_int_cont2 = 0;
3534 rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
3535 rdev->irq.stat_regs.r600.hdmi1_status = RREG32(HDMI1_STATUS);
3536 }
3537 rdev->irq.stat_regs.r600.d1grph_int = RREG32(D1GRPH_INTERRUPT_STATUS);
3538 rdev->irq.stat_regs.r600.d2grph_int = RREG32(D2GRPH_INTERRUPT_STATUS);
3539
3540 if (rdev->irq.stat_regs.r600.d1grph_int & DxGRPH_PFLIP_INT_OCCURRED)
3541 WREG32(D1GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
3542 if (rdev->irq.stat_regs.r600.d2grph_int & DxGRPH_PFLIP_INT_OCCURRED)
3543 WREG32(D2GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
3544 if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT)
3545 WREG32(D1MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
3546 if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT)
3547 WREG32(D1MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
3548 if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT)
3549 WREG32(D2MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
3550 if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT)
3551 WREG32(D2MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
3552 if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) {
3553 if (ASIC_IS_DCE3(rdev)) {
3554 tmp = RREG32(DC_HPD1_INT_CONTROL);
3555 tmp |= DC_HPDx_INT_ACK;
3556 WREG32(DC_HPD1_INT_CONTROL, tmp);
3557 } else {
3558 tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
3559 tmp |= DC_HPDx_INT_ACK;
3560 WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
3561 }
3562 }
3563 if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) {
3564 if (ASIC_IS_DCE3(rdev)) {
3565 tmp = RREG32(DC_HPD2_INT_CONTROL);
3566 tmp |= DC_HPDx_INT_ACK;
3567 WREG32(DC_HPD2_INT_CONTROL, tmp);
3568 } else {
3569 tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
3570 tmp |= DC_HPDx_INT_ACK;
3571 WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
3572 }
3573 }
3574 if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) {
3575 if (ASIC_IS_DCE3(rdev)) {
3576 tmp = RREG32(DC_HPD3_INT_CONTROL);
3577 tmp |= DC_HPDx_INT_ACK;
3578 WREG32(DC_HPD3_INT_CONTROL, tmp);
3579 } else {
3580 tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
3581 tmp |= DC_HPDx_INT_ACK;
3582 WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
3583 }
3584 }
3585 if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) {
3586 tmp = RREG32(DC_HPD4_INT_CONTROL);
3587 tmp |= DC_HPDx_INT_ACK;
3588 WREG32(DC_HPD4_INT_CONTROL, tmp);
3589 }
3590 if (ASIC_IS_DCE32(rdev)) {
3591 if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) {
3592 tmp = RREG32(DC_HPD5_INT_CONTROL);
3593 tmp |= DC_HPDx_INT_ACK;
3594 WREG32(DC_HPD5_INT_CONTROL, tmp);
3595 }
3596 if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) {
			tmp = RREG32(DC_HPD6_INT_CONTROL);
3598 tmp |= DC_HPDx_INT_ACK;
3599 WREG32(DC_HPD6_INT_CONTROL, tmp);
3600 }
3601 if (rdev->irq.stat_regs.r600.hdmi0_status & AFMT_AZ_FORMAT_WTRIG) {
3602 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0);
3603 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
3604 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, tmp);
3605 }
3606 if (rdev->irq.stat_regs.r600.hdmi1_status & AFMT_AZ_FORMAT_WTRIG) {
3607 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1);
3608 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
3609 WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, tmp);
3610 }
3611 } else {
3612 if (rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG) {
3613 tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL);
3614 tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
3615 WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
3616 }
3617 if (rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG) {
3618 if (ASIC_IS_DCE3(rdev)) {
3619 tmp = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL);
3620 tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
3621 WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, tmp);
3622 } else {
3623 tmp = RREG32(HDMI1_AUDIO_PACKET_CONTROL);
3624 tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
3625 WREG32(HDMI1_AUDIO_PACKET_CONTROL, tmp);
3626 }
3627 }
3628 }
3629}
3630
3631void r600_irq_disable(struct radeon_device *rdev)
3632{
3633 r600_disable_interrupts(rdev);
3634
3635 mdelay(1);
3636 r600_irq_ack(rdev);
3637 r600_disable_interrupt_state(rdev);
3638}
3639
3640static u32 r600_get_ih_wptr(struct radeon_device *rdev)
3641{
3642 u32 wptr, tmp;
3643
3644 if (rdev->wb.enabled)
3645 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
3646 else
3647 wptr = RREG32(IH_RB_WPTR);
3648
3649 if (wptr & RB_OVERFLOW) {
		/* When a ring buffer overflow happens, start parsing from the
		 * first vector that has not been overwritten (wptr + 16) so
		 * we can hopefully catch up.
		 */
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
3656 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
3657 tmp = RREG32(IH_RB_CNTL);
3658 tmp |= IH_WPTR_OVERFLOW_CLEAR;
3659 WREG32(IH_RB_CNTL, tmp);
3660 }
3661 return (wptr & rdev->ih.ptr_mask);
3662}
3663
3693
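/* r600 IV Ring
 * Each IV ring entry is 128 bits:
 * [7:0]    - interrupt source id
 * [31:8]   - reserved
 * [59:32]  - interrupt source data
 * [127:60] - reserved
 *
 * The vectors handled below are decoded as follows:
 * src_id  src_data  description
 *      1         0  D1 vblank
 *      1         1  D1 vline
 *      5         0  D2 vblank
 *      5         1  D2 vline
 *     19         0  HPD1
 *     19         1  HPD2
 *     19         4  HPD3
 *     19         5  HPD4
 *     19        10  HPD5
 *     19        12  HPD6
 */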
int r600_irq_process(struct radeon_device *rdev)
{
	u32 wptr;
	u32 rptr;
	u32 src_id, src_data;
	u32 ring_index;
	bool queue_hotplug = false;
	bool queue_hdmi = false;
	bool queue_thermal = false;

	if (!rdev->ih.enabled || rdev->shutdown)
		return IRQ_NONE;

	/* No MSIs, need a dummy read to flush PCI DMAs */
	if (!rdev->msi_enabled)
		RREG32(IH_RB_WPTR);

	wptr = r600_get_ih_wptr(rdev);

restart_ih:
	/* is somebody else already processing irqs? */
	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;

	rptr = rdev->ih.rptr;
	DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);

	/* Order reading of wptr vs. reading of IH ring data */
	rmb();

	/* display interrupts */
	r600_irq_ack(rdev);

	while (rptr != wptr) {
		/* wptr/rptr are in bytes! */
		ring_index = rptr / 4;
		src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;

		switch (src_id) {
		case 1: /* D1 vblank/vline */
			switch (src_data) {
			case 0: /* D1 vblank */
				if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[0]) {
						drm_handle_vblank(rdev->ddev, 0);
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
					}
					if (atomic_read(&rdev->irq.pflip[0]))
						radeon_crtc_handle_flip(rdev, 0);
					rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D1 vblank\n");
				}
				break;
			case 1: /* D1 vline */
				if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D1 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 5: /* D2 vblank/vline */
			switch (src_data) {
			case 0: /* D2 vblank */
				if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[1]) {
						drm_handle_vblank(rdev->ddev, 1);
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
					}
					if (atomic_read(&rdev->irq.pflip[1]))
						radeon_crtc_handle_flip(rdev, 1);
					rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D2 vblank\n");
				}
				break;
			case 1: /* D2 vline */
				if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D2 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 19: /* HPD/DAC hotplug */
			switch (src_data) {
			case 0:
				if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD1_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD1\n");
				}
				break;
			case 1:
				if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD2_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD2\n");
				}
				break;
			case 4:
				if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD3_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD3\n");
				}
				break;
			case 5:
				if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD4_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD4\n");
				}
				break;
			case 10:
				if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD5_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD5\n");
				}
				break;
			case 12:
				if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD6_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD6\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 21: /* hdmi */
			switch (src_data) {
			case 4:
				if (rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG) {
					rdev->irq.stat_regs.r600.hdmi0_status &= ~HDMI0_AZ_FORMAT_WTRIG;
					queue_hdmi = true;
					DRM_DEBUG("IH: HDMI0\n");
				}
				break;
			case 5:
				if (rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG) {
					rdev->irq.stat_regs.r600.hdmi1_status &= ~HDMI0_AZ_FORMAT_WTRIG;
					queue_hdmi = true;
					DRM_DEBUG("IH: HDMI1\n");
				}
				break;
			default:
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 176: /* CP_INT in ring buffer */
		case 177: /* CP_INT in IB1 */
		case 178: /* CP_INT in IB2 */
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 181: /* CP EOP event */
			DRM_DEBUG("IH: CP EOP\n");
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 224: /* DMA trap event */
			DRM_DEBUG("IH: DMA trap\n");
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
			break;
		case 230: /* thermal low to high */
			DRM_DEBUG("IH: thermal low to high\n");
			rdev->pm.dpm.thermal.high_to_low = false;
			queue_thermal = true;
			break;
		case 231: /* thermal high to low */
			DRM_DEBUG("IH: thermal high to low\n");
			rdev->pm.dpm.thermal.high_to_low = true;
			queue_thermal = true;
			break;
		case 233: /* GUI idle (r6xx) */
			DRM_DEBUG("IH: GUI idle\n");
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}

		/* wptr/rptr are in bytes! */
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
	}
	if (queue_hotplug)
		schedule_work(&rdev->hotplug_work);
	if (queue_hdmi)
		schedule_work(&rdev->audio_work);
	if (queue_thermal && rdev->pm.dpm_enabled)
		schedule_work(&rdev->pm.dpm.thermal.work);
	rdev->ih.rptr = rptr;
	WREG32(IH_RB_RPTR, rdev->ih.rptr);
	atomic_set(&rdev->ih.lock, 0);

	/* make sure wptr hasn't changed while processing */
	wptr = r600_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}

/*
 * Debugfs info
 */
#if defined(CONFIG_DEBUG_FS)

static int r600_debugfs_mc_info(struct seq_file *m, void *data)
{
	struct drm_info_node *node = (struct drm_info_node *) m->private;
	struct drm_device *dev = node->minor->dev;
	struct radeon_device *rdev = dev->dev_private;

	DREG32_SYS(m, rdev, R_000E50_SRBM_STATUS);
	DREG32_SYS(m, rdev, VM_L2_STATUS);
	return 0;
}

static struct drm_info_list r600_mc_info_list[] = {
	{"r600_mc_info", r600_debugfs_mc_info, 0, NULL},
};
#endif

int r600_debugfs_mc_info_init(struct radeon_device *rdev)
{
#if defined(CONFIG_DEBUG_FS)
	return radeon_debugfs_add_files(rdev, r600_mc_info_list, ARRAY_SIZE(r600_mc_info_list));
#else
	return 0;
#endif
}
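
/* Usage note (not from the original source): with debugfs mounted in the
 * usual place, the entry registered above can be read as, e.g. (the minor
 * number may differ):
 *
 *   cat /sys/kernel/debug/dri/0/r600_mc_info
 *
 * which dumps SRBM_STATUS and VM_L2_STATUS via DREG32_SYS for a quick
 * memory-controller health check.
 */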

/**
 * r600_ioctl_wait_idle - flush host path cache on wait idle ioctl
 * rdev: radeon device structure
 * bo: buffer object struct which userspace is waiting for idle
 *
 * Some R6XX/R7XX doesn't seems to take into account HDP flush performed
 * through ring buffer, this leads to corruption in rendering, see
 * http://bugzilla.kernel.org/show_bug.cgi?id=15186 to avoid this we
 * directly perform HDP flush by writing register through MMIO.
 */
void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo)
{
	/* r7xx hw bug.  write to HDP_DEBUG1 followed by fb read
	 * rather than write to HDP_REG_COHERENCY_FLUSH_CNTL.
	 * This seems to cause problems on some AGP cards. Just use the old
	 * method for them.
	 */
	if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
	    rdev->vram_scratch.ptr && !(rdev->flags & RADEON_IS_AGP)) {
		void __iomem *ptr = (void *)rdev->vram_scratch.ptr;
		u32 tmp;

		WREG32(HDP_DEBUG1, 0);
		tmp = readl((void __iomem *)ptr);
	} else
		WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
}

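/* Descriptive note (not from the original source): request a new PCIE link
 * width on discrete cards by programming the lane-count mask and kicking
 * off a link reconfiguration.  IGP and X2 boards are skipped, and the GPU
 * is idled first so the retrain happens while the chip is quiet.
 */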
void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes)
{
	u32 link_width_cntl, mask;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	radeon_gui_idle(rdev);

	switch (lanes) {
	case 0:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X0;
		break;
	case 1:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X1;
		break;
	case 2:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X2;
		break;
	case 4:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X4;
		break;
	case 8:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X8;
		break;
	case 12:
		/* not actually supported */
		mask = RADEON_PCIE_LC_LINK_WIDTH_X12;
		break;
	case 16:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X16;
		break;
	default:
		DRM_ERROR("invalid pcie lane request: %d\n", lanes);
		return;
	}

	link_width_cntl = RREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL);
	link_width_cntl &= ~RADEON_PCIE_LC_LINK_WIDTH_MASK;
	link_width_cntl |= mask << RADEON_PCIE_LC_LINK_WIDTH_SHIFT;
	link_width_cntl |= (RADEON_PCIE_LC_RECONFIG_NOW |
			    R600_PCIE_LC_RECONFIG_ARC_MISSING_ESCAPE);

	WREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
}

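/* Descriptive note (not from the original source): read back the currently
 * negotiated link width from LC_LINK_WIDTH_CNTL and translate the encoded
 * field into a lane count; returns 0 on IGP, non-PCIE, and X2 parts where
 * the value is not meaningful.
 */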
int r600_get_pcie_lanes(struct radeon_device *rdev)
{
	u32 link_width_cntl;

	if (rdev->flags & RADEON_IS_IGP)
		return 0;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return 0;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return 0;

	radeon_gui_idle(rdev);

	link_width_cntl = RREG32_PCIE_PORT(RADEON_PCIE_LC_LINK_WIDTH_CNTL);

	switch ((link_width_cntl & RADEON_PCIE_LC_LINK_WIDTH_RD_MASK) >> RADEON_PCIE_LC_LINK_WIDTH_RD_SHIFT) {
	case RADEON_PCIE_LC_LINK_WIDTH_X1:
		return 1;
	case RADEON_PCIE_LC_LINK_WIDTH_X2:
		return 2;
	case RADEON_PCIE_LC_LINK_WIDTH_X4:
		return 4;
	case RADEON_PCIE_LC_LINK_WIDTH_X8:
		return 8;
	case RADEON_PCIE_LC_LINK_WIDTH_X12:
		/* not actually supported */
		return 12;
	case RADEON_PCIE_LC_LINK_WIDTH_X0:
	case RADEON_PCIE_LC_LINK_WIDTH_X16:
	default:
		return 16;
	}
}

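/* Descriptive note (not from the original source): attempt to bring the
 * PCIE link up to gen2 (5.0 GT/s).  Bails out when the user disabled it
 * (radeon.pcie_gen2=0), on IGP/X2/R600 and earlier parts, when the slot
 * cannot do 5.0/8.0 GT/s, or when gen2 is already active.  The 55nm parts
 * (RV670/RV620/RV635) need extra link renegotiation setup before the
 * hardware speed change is forced.
 */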
static void r600_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, lanes, speed_cntl, training_cntl, tmp;
	u16 link_cntl2;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	/* only RV6xx+ chips are supported */
	if (rdev->family <= CHIP_R600)
		return;

	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
	    (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	/* 55 nm r6xx asics */
	if ((rdev->family == CHIP_RV670) ||
	    (rdev->family == CHIP_RV620) ||
	    (rdev->family == CHIP_RV635)) {
		/* advertise upconfig capability */
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		if (link_width_cntl & LC_RENEGOTIATION_SUPPORT) {
			lanes = (link_width_cntl & LC_LINK_WIDTH_RD_MASK) >> LC_LINK_WIDTH_RD_SHIFT;
			link_width_cntl &= ~(LC_LINK_WIDTH_MASK |
					     LC_RECONFIG_ARC_MISSING_ESCAPE);
			link_width_cntl |= lanes | LC_RECONFIG_NOW | LC_RENEGOTIATE_EN;
			WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		} else {
			link_width_cntl |= LC_UPCONFIGURE_DIS;
			WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		}
	}

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		/* 55 nm r6xx asics */
		if ((rdev->family == CHIP_RV670) ||
		    (rdev->family == CHIP_RV620) ||
		    (rdev->family == CHIP_RV635)) {
			WREG32(MM_CFGREGS_CNTL, 0x8);
			link_cntl2 = RREG32(0x4088);
			WREG32(MM_CFGREGS_CNTL, 0);

			/* not supported yet */
			if (link_cntl2 & SELECTABLE_DEEMPHASIS)
				return;
		}

		speed_cntl &= ~LC_SPEED_CHANGE_ATTEMPTS_ALLOWED_MASK;
		speed_cntl |= (0x3 << LC_SPEED_CHANGE_ATTEMPTS_ALLOWED_SHIFT);
		speed_cntl &= ~LC_VOLTAGE_TIMER_SEL_MASK;
		speed_cntl &= ~LC_FORCE_DIS_HW_SPEED_CHANGE;
		speed_cntl |= LC_FORCE_EN_HW_SPEED_CHANGE;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		tmp = RREG32(0x541c);
		WREG32(0x541c, tmp | 0x8);
		WREG32(MM_CFGREGS_CNTL, MM_WR_TO_CFG_EN);
		link_cntl2 = RREG16(0x4088);
		link_cntl2 &= ~TARGET_LINK_SPEED_MASK;
		link_cntl2 |= 0x2;
		WREG16(0x4088, link_cntl2);
		WREG32(MM_CFGREGS_CNTL, 0);

		if ((rdev->family == CHIP_RV670) ||
		    (rdev->family == CHIP_RV620) ||
		    (rdev->family == CHIP_RV635)) {
			training_cntl = RREG32_PCIE_PORT(PCIE_LC_TRAINING_CNTL);
			training_cntl &= ~LC_POINT_7_PLUS_EN;
			WREG32_PCIE_PORT(PCIE_LC_TRAINING_CNTL, training_cntl);
		} else {
			speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
			speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
		}

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}

/**
 * r600_get_gpu_clock_counter - return GPU clock counter snapshot
 *
 * @rdev: radeon_device pointer
 *
 * Fetches a GPU clock counter snapshot (R6xx-cayman).
 * Returns the 64 bit clock counter snapshot.
 */
uint64_t r600_get_gpu_clock_counter(struct radeon_device *rdev)
{
	uint64_t clock;

	mutex_lock(&rdev->gpu_clock_mutex);
	WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
	clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
		((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
	mutex_unlock(&rdev->gpu_clock_mutex);
	return clock;
}
