#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include <linux/module.h>
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_mode.h"
#include "r600d.h"
#include "atom.h"
#include "avivod.h"

#define PFP_UCODE_SIZE 576
#define PM4_UCODE_SIZE 1792
#define RLC_UCODE_SIZE 768
#define R700_PFP_UCODE_SIZE 848
#define R700_PM4_UCODE_SIZE 1360
#define R700_RLC_UCODE_SIZE 1024
#define EVERGREEN_PFP_UCODE_SIZE 1120
#define EVERGREEN_PM4_UCODE_SIZE 1376
#define EVERGREEN_RLC_UCODE_SIZE 768
#define CAYMAN_RLC_UCODE_SIZE 1024
#define ARUBA_RLC_UCODE_SIZE 1536

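/* Firmware Names */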
MODULE_FIRMWARE("radeon/R600_pfp.bin");
MODULE_FIRMWARE("radeon/R600_me.bin");
MODULE_FIRMWARE("radeon/RV610_pfp.bin");
MODULE_FIRMWARE("radeon/RV610_me.bin");
MODULE_FIRMWARE("radeon/RV630_pfp.bin");
MODULE_FIRMWARE("radeon/RV630_me.bin");
MODULE_FIRMWARE("radeon/RV620_pfp.bin");
MODULE_FIRMWARE("radeon/RV620_me.bin");
MODULE_FIRMWARE("radeon/RV635_pfp.bin");
MODULE_FIRMWARE("radeon/RV635_me.bin");
MODULE_FIRMWARE("radeon/RV670_pfp.bin");
MODULE_FIRMWARE("radeon/RV670_me.bin");
MODULE_FIRMWARE("radeon/RS780_pfp.bin");
MODULE_FIRMWARE("radeon/RS780_me.bin");
MODULE_FIRMWARE("radeon/RV770_pfp.bin");
MODULE_FIRMWARE("radeon/RV770_me.bin");
MODULE_FIRMWARE("radeon/RV730_pfp.bin");
MODULE_FIRMWARE("radeon/RV730_me.bin");
MODULE_FIRMWARE("radeon/RV710_pfp.bin");
MODULE_FIRMWARE("radeon/RV710_me.bin");
MODULE_FIRMWARE("radeon/R600_rlc.bin");
MODULE_FIRMWARE("radeon/R700_rlc.bin");
MODULE_FIRMWARE("radeon/CEDAR_pfp.bin");
MODULE_FIRMWARE("radeon/CEDAR_me.bin");
MODULE_FIRMWARE("radeon/CEDAR_rlc.bin");
MODULE_FIRMWARE("radeon/REDWOOD_pfp.bin");
MODULE_FIRMWARE("radeon/REDWOOD_me.bin");
MODULE_FIRMWARE("radeon/REDWOOD_rlc.bin");
MODULE_FIRMWARE("radeon/JUNIPER_pfp.bin");
MODULE_FIRMWARE("radeon/JUNIPER_me.bin");
MODULE_FIRMWARE("radeon/JUNIPER_rlc.bin");
MODULE_FIRMWARE("radeon/CYPRESS_pfp.bin");
MODULE_FIRMWARE("radeon/CYPRESS_me.bin");
MODULE_FIRMWARE("radeon/CYPRESS_rlc.bin");
MODULE_FIRMWARE("radeon/PALM_pfp.bin");
MODULE_FIRMWARE("radeon/PALM_me.bin");
MODULE_FIRMWARE("radeon/SUMO_rlc.bin");
MODULE_FIRMWARE("radeon/SUMO_pfp.bin");
MODULE_FIRMWARE("radeon/SUMO_me.bin");
MODULE_FIRMWARE("radeon/SUMO2_pfp.bin");
MODULE_FIRMWARE("radeon/SUMO2_me.bin");

int r600_debugfs_mc_info_init(struct radeon_device *rdev);

/* r600,rv610,rv630,rv620,rv635,rv670 */
int r600_mc_wait_for_idle(struct radeon_device *rdev);
static void r600_gpu_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
void r600_irq_disable(struct radeon_device *rdev);
static void r600_pcie_gen2_enable(struct radeon_device *rdev);

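/* get temperature in millidegrees */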
int rv6xx_get_temp(struct radeon_device *rdev)
{
        u32 temp = (RREG32(CG_THERMAL_STATUS) & ASIC_T_MASK) >>
                ASIC_T_SHIFT;
        int actual_temp = temp & 0xff;

        /* the temperature field is a signed 9-bit value */
        if (temp & 0x100)
                actual_temp -= 256;

        return actual_temp * 1000;
}

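/*
 * Pick the power state index and clock mode index to switch to for the
 * planned dynpm action, honoring single-display-only power states when
 * more than one crtc is active.
 */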
void r600_pm_get_dynpm_state(struct radeon_device *rdev)
{
        int i;

        rdev->pm.dynpm_can_upclock = true;
        rdev->pm.dynpm_can_downclock = true;

        /* power state array is low to high, default is first (0) */
        if ((rdev->flags & RADEON_IS_IGP) || (rdev->family == CHIP_R600)) {
                int min_power_state_index = 0;

                if (rdev->pm.num_power_states > 2)
                        min_power_state_index = 1;

                switch (rdev->pm.dynpm_planned_action) {
                case DYNPM_ACTION_MINIMUM:
                        rdev->pm.requested_power_state_index = min_power_state_index;
                        rdev->pm.requested_clock_mode_index = 0;
                        rdev->pm.dynpm_can_downclock = false;
                        break;
                case DYNPM_ACTION_DOWNCLOCK:
                        if (rdev->pm.current_power_state_index == min_power_state_index) {
                                rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
                                rdev->pm.dynpm_can_downclock = false;
                        } else {
                                if (rdev->pm.active_crtc_count > 1) {
                                        for (i = 0; i < rdev->pm.num_power_states; i++) {
                                                if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
                                                        continue;
                                                else if (i >= rdev->pm.current_power_state_index) {
                                                        rdev->pm.requested_power_state_index =
                                                                rdev->pm.current_power_state_index;
                                                        break;
                                                } else {
                                                        rdev->pm.requested_power_state_index = i;
                                                        break;
                                                }
                                        }
                                } else {
                                        if (rdev->pm.current_power_state_index == 0)
                                                rdev->pm.requested_power_state_index =
                                                        rdev->pm.num_power_states - 1;
                                        else
                                                rdev->pm.requested_power_state_index =
                                                        rdev->pm.current_power_state_index - 1;
                                }
                        }
                        rdev->pm.requested_clock_mode_index = 0;
                        /* don't use the power state if crtcs are active and no display flag is set */
                        if ((rdev->pm.active_crtc_count > 0) &&
                            (rdev->pm.power_state[rdev->pm.requested_power_state_index].
                             clock_info[rdev->pm.requested_clock_mode_index].flags &
                             RADEON_PM_MODE_NO_DISPLAY)) {
                                rdev->pm.requested_power_state_index++;
                        }
                        break;
                case DYNPM_ACTION_UPCLOCK:
                        if (rdev->pm.current_power_state_index == (rdev->pm.num_power_states - 1)) {
                                rdev->pm.requested_power_state_index = rdev->pm.current_power_state_index;
                                rdev->pm.dynpm_can_upclock = false;
                        } else {
                                if (rdev->pm.active_crtc_count > 1) {
                                        for (i = (rdev->pm.num_power_states - 1); i >= 0; i--) {
                                                if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
                                                        continue;
                                                else if (i <= rdev->pm.current_power_state_index) {
                                                        rdev->pm.requested_power_state_index =
                                                                rdev->pm.current_power_state_index;
                                                        break;
                                                } else {
                                                        rdev->pm.requested_power_state_index = i;
                                                        break;
                                                }
                                        }
                                } else
                                        rdev->pm.requested_power_state_index =
                                                rdev->pm.current_power_state_index + 1;
                        }
                        rdev->pm.requested_clock_mode_index = 0;
                        break;
                case DYNPM_ACTION_DEFAULT:
                        rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
                        rdev->pm.requested_clock_mode_index = 0;
                        rdev->pm.dynpm_can_upclock = false;
                        break;
                case DYNPM_ACTION_NONE:
                default:
                        DRM_ERROR("Requested mode for undefined dynpm action\n");
                        return;
                }
        } else {
                /* XXX select a power state based on AC/DC, single/dualhead, etc. */
                /* for now just select the first power state and switch between clock modes */
                /* power state array is low to high, default is first (0) */
                if (rdev->pm.active_crtc_count > 1) {
                        rdev->pm.requested_power_state_index = -1;
                        /* start at 1 as we're not going to set the default state */
                        for (i = 1; i < rdev->pm.num_power_states; i++) {
                                if (rdev->pm.power_state[i].flags & RADEON_PM_STATE_SINGLE_DISPLAY_ONLY)
                                        continue;
                                else if ((rdev->pm.power_state[i].type == POWER_STATE_TYPE_PERFORMANCE) ||
                                         (rdev->pm.power_state[i].type == POWER_STATE_TYPE_BATTERY)) {
                                        rdev->pm.requested_power_state_index = i;
                                        break;
                                }
                        }
                        /* if nothing selected, grab the default state. */
                        if (rdev->pm.requested_power_state_index == -1)
                                rdev->pm.requested_power_state_index = 0;
                } else
                        rdev->pm.requested_power_state_index = 1;

                switch (rdev->pm.dynpm_planned_action) {
                case DYNPM_ACTION_MINIMUM:
                        rdev->pm.requested_clock_mode_index = 0;
                        rdev->pm.dynpm_can_downclock = false;
                        break;
                case DYNPM_ACTION_DOWNCLOCK:
                        if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
                                if (rdev->pm.current_clock_mode_index == 0) {
                                        rdev->pm.requested_clock_mode_index = 0;
                                        rdev->pm.dynpm_can_downclock = false;
                                } else
                                        rdev->pm.requested_clock_mode_index =
                                                rdev->pm.current_clock_mode_index - 1;
                        } else {
                                rdev->pm.requested_clock_mode_index = 0;
                                rdev->pm.dynpm_can_downclock = false;
                        }
                        /* don't use the power state if crtcs are active and no display flag is set */
                        if ((rdev->pm.active_crtc_count > 0) &&
                            (rdev->pm.power_state[rdev->pm.requested_power_state_index].
                             clock_info[rdev->pm.requested_clock_mode_index].flags &
                             RADEON_PM_MODE_NO_DISPLAY)) {
                                rdev->pm.requested_clock_mode_index++;
                        }
                        break;
                case DYNPM_ACTION_UPCLOCK:
                        if (rdev->pm.requested_power_state_index == rdev->pm.current_power_state_index) {
                                if (rdev->pm.current_clock_mode_index ==
                                    (rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1)) {
                                        rdev->pm.requested_clock_mode_index = rdev->pm.current_clock_mode_index;
                                        rdev->pm.dynpm_can_upclock = false;
                                } else
                                        rdev->pm.requested_clock_mode_index =
                                                rdev->pm.current_clock_mode_index + 1;
                        } else {
                                rdev->pm.requested_clock_mode_index =
                                        rdev->pm.power_state[rdev->pm.requested_power_state_index].num_clock_modes - 1;
                                rdev->pm.dynpm_can_upclock = false;
                        }
                        break;
                case DYNPM_ACTION_DEFAULT:
                        rdev->pm.requested_power_state_index = rdev->pm.default_power_state_index;
                        rdev->pm.requested_clock_mode_index = 0;
                        rdev->pm.dynpm_can_upclock = false;
                        break;
                case DYNPM_ACTION_NONE:
                default:
                        DRM_ERROR("Requested mode for undefined dynpm action\n");
                        return;
                }
        }

        DRM_DEBUG_DRIVER("Requested: e: %d m: %d p: %d\n",
                         rdev->pm.power_state[rdev->pm.requested_power_state_index].
                         clock_info[rdev->pm.requested_clock_mode_index].sclk,
                         rdev->pm.power_state[rdev->pm.requested_power_state_index].
                         clock_info[rdev->pm.requested_clock_mode_index].mclk,
                         rdev->pm.power_state[rdev->pm.requested_power_state_index].
                         pcie_lanes);
}

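/*
 * Fill in the dpms on/off power state and clock mode indices for each
 * PM profile (default, low/mid/high single-head and multi-head) based
 * on how many power states the board exposes.
 */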
void rs780_pm_init_profile(struct radeon_device *rdev)
{
        if (rdev->pm.num_power_states == 2) {
                /* default */
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
                /* low sh */
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
                /* mid sh */
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
                /* high sh */
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
                /* low mh */
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
                /* mid mh */
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
                /* high mh */
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
        } else if (rdev->pm.num_power_states == 3) {
                /* default */
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
                /* low sh */
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
                /* mid sh */
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
                /* high sh */
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 2;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
                /* low mh */
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
                /* mid mh */
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
                /* high mh */
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 1;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
        } else {
                /* default */
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
                /* low sh */
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 2;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 2;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
                /* mid sh */
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 2;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 2;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
                /* high sh */
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 2;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 3;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
                /* low mh */
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
                /* mid mh */
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
                /* high mh */
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 3;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
        }
}

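/*
 * Same profile setup for r6xx asics; R600 itself has no clock gating,
 * so all profiles use the default power state there.
 */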
void r600_pm_init_profile(struct radeon_device *rdev)
{
        int idx;

        if (rdev->family == CHIP_R600) {
                /* default */
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
                /* low sh */
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
                /* mid sh */
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
                /* high sh */
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 0;
                /* low mh */
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
                /* mid mh */
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
                /* high mh */
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
                rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 0;
        } else {
                if (rdev->pm.num_power_states < 4) {
                        /* default */
                        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
                        /* low sh */
                        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 1;
                        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 1;
                        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
                        /* mid sh */
                        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 1;
                        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = 1;
                        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
                        /* high sh */
                        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = 1;
                        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = 1;
                        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
                        /* low mh */
                        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = 2;
                        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = 2;
                        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
                        /* mid mh */
                        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = 2;
                        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = 2;
                        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
                        /* high mh */
                        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = 2;
                        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = 2;
                        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
                } else {
                        /* default */
                        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
                        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
                        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
                        /* low sh */
                        if (rdev->flags & RADEON_IS_MOBILITY)
                                idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
                        else
                                idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
                        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
                        /* mid sh */
                        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
                        /* high sh */
                        idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
                        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
                        /* low mh */
                        if (rdev->flags & RADEON_IS_MOBILITY)
                                idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 1);
                        else
                                idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
                        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
                        /* mid mh */
                        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
                        /* high mh */
                        idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 1);
                        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
                        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
                        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
                }
        }
}

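/* Program the driver-controlled (SW) voltage for the requested power state. */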
void r600_pm_misc(struct radeon_device *rdev)
{
        int req_ps_idx = rdev->pm.requested_power_state_index;
        int req_cm_idx = rdev->pm.requested_clock_mode_index;
        struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
        struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

        if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
                /* 0xff01 is a flag rather than an actual voltage */
                if (voltage->voltage == 0xff01)
                        return;
                if (voltage->voltage != rdev->pm.current_vddc) {
                        radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
                        rdev->pm.current_vddc = voltage->voltage;
                        DRM_DEBUG_DRIVER("Setting: v: %d\n", voltage->voltage);
                }
        }
}

bool r600_gui_idle(struct radeon_device *rdev)
{
        return !(RREG32(GRBM_STATUS) & GUI_ACTIVE);
}

/* hpd */
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
        bool connected = false;

        if (ASIC_IS_DCE3(rdev)) {
                switch (hpd) {
                case RADEON_HPD_1:
                        if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
                                connected = true;
                        break;
                case RADEON_HPD_2:
                        if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
                                connected = true;
                        break;
                case RADEON_HPD_3:
                        if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
                                connected = true;
                        break;
                case RADEON_HPD_4:
                        if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
                                connected = true;
                        break;
                /* DCE 3.2 */
                case RADEON_HPD_5:
                        if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
                                connected = true;
                        break;
                case RADEON_HPD_6:
                        if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
                                connected = true;
                        break;
                default:
                        break;
                }
        } else {
                switch (hpd) {
                case RADEON_HPD_1:
                        if (RREG32(DC_HOT_PLUG_DETECT1_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
                                connected = true;
                        break;
                case RADEON_HPD_2:
                        if (RREG32(DC_HOT_PLUG_DETECT2_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
                                connected = true;
                        break;
                case RADEON_HPD_3:
                        if (RREG32(DC_HOT_PLUG_DETECT3_INT_STATUS) & DC_HOT_PLUG_DETECTx_SENSE)
                                connected = true;
                        break;
                default:
                        break;
                }
        }
        return connected;
}

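/*
 * Set the HPD interrupt polarity so that the next state change
 * (connect or disconnect) triggers an interrupt.
 */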
void r600_hpd_set_polarity(struct radeon_device *rdev,
                           enum radeon_hpd_id hpd)
{
        u32 tmp;
        bool connected = r600_hpd_sense(rdev, hpd);

        if (ASIC_IS_DCE3(rdev)) {
                switch (hpd) {
                case RADEON_HPD_1:
                        tmp = RREG32(DC_HPD1_INT_CONTROL);
                        if (connected)
                                tmp &= ~DC_HPDx_INT_POLARITY;
                        else
                                tmp |= DC_HPDx_INT_POLARITY;
                        WREG32(DC_HPD1_INT_CONTROL, tmp);
                        break;
                case RADEON_HPD_2:
                        tmp = RREG32(DC_HPD2_INT_CONTROL);
                        if (connected)
                                tmp &= ~DC_HPDx_INT_POLARITY;
                        else
                                tmp |= DC_HPDx_INT_POLARITY;
                        WREG32(DC_HPD2_INT_CONTROL, tmp);
                        break;
                case RADEON_HPD_3:
                        tmp = RREG32(DC_HPD3_INT_CONTROL);
                        if (connected)
                                tmp &= ~DC_HPDx_INT_POLARITY;
                        else
                                tmp |= DC_HPDx_INT_POLARITY;
                        WREG32(DC_HPD3_INT_CONTROL, tmp);
                        break;
                case RADEON_HPD_4:
                        tmp = RREG32(DC_HPD4_INT_CONTROL);
                        if (connected)
                                tmp &= ~DC_HPDx_INT_POLARITY;
                        else
                                tmp |= DC_HPDx_INT_POLARITY;
                        WREG32(DC_HPD4_INT_CONTROL, tmp);
                        break;
                case RADEON_HPD_5:
                        tmp = RREG32(DC_HPD5_INT_CONTROL);
                        if (connected)
                                tmp &= ~DC_HPDx_INT_POLARITY;
                        else
                                tmp |= DC_HPDx_INT_POLARITY;
                        WREG32(DC_HPD5_INT_CONTROL, tmp);
                        break;
                /* DCE 3.2 */
                case RADEON_HPD_6:
                        tmp = RREG32(DC_HPD6_INT_CONTROL);
                        if (connected)
                                tmp &= ~DC_HPDx_INT_POLARITY;
                        else
                                tmp |= DC_HPDx_INT_POLARITY;
                        WREG32(DC_HPD6_INT_CONTROL, tmp);
                        break;
                default:
                        break;
                }
        } else {
                switch (hpd) {
                case RADEON_HPD_1:
                        tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
                        if (connected)
                                tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
                        else
                                tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
                        WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
                        break;
                case RADEON_HPD_2:
                        tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
                        if (connected)
                                tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
                        else
                                tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
                        WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
                        break;
                case RADEON_HPD_3:
                        tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
                        if (connected)
                                tmp &= ~DC_HOT_PLUG_DETECTx_INT_POLARITY;
                        else
                                tmp |= DC_HOT_PLUG_DETECTx_INT_POLARITY;
                        WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
                        break;
                default:
                        break;
                }
        }
}

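/* Enable the HPD pins used by the connectors and set their polarity. */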
void r600_hpd_init(struct radeon_device *rdev)
{
        struct drm_device *dev = rdev->ddev;
        struct drm_connector *connector;
        unsigned enable = 0;

        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct radeon_connector *radeon_connector = to_radeon_connector(connector);

                if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
                    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
                        /* don't try to enable hpd on eDP or LVDS; it can
                         * break the aux dp channel on some panels and
                         * cause interrupt storms during dpms.
                         */
                        continue;
                }
                if (ASIC_IS_DCE3(rdev)) {
                        u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) | DC_HPDx_RX_INT_TIMER(0xfa);
                        if (ASIC_IS_DCE32(rdev))
                                tmp |= DC_HPDx_EN;

                        switch (radeon_connector->hpd.hpd) {
                        case RADEON_HPD_1:
                                WREG32(DC_HPD1_CONTROL, tmp);
                                break;
                        case RADEON_HPD_2:
                                WREG32(DC_HPD2_CONTROL, tmp);
                                break;
                        case RADEON_HPD_3:
                                WREG32(DC_HPD3_CONTROL, tmp);
                                break;
                        case RADEON_HPD_4:
                                WREG32(DC_HPD4_CONTROL, tmp);
                                break;
                        /* DCE 3.2 */
                        case RADEON_HPD_5:
                                WREG32(DC_HPD5_CONTROL, tmp);
                                break;
                        case RADEON_HPD_6:
                                WREG32(DC_HPD6_CONTROL, tmp);
                                break;
                        default:
                                break;
                        }
                } else {
                        switch (radeon_connector->hpd.hpd) {
                        case RADEON_HPD_1:
                                WREG32(DC_HOT_PLUG_DETECT1_CONTROL, DC_HOT_PLUG_DETECTx_EN);
                                break;
                        case RADEON_HPD_2:
                                WREG32(DC_HOT_PLUG_DETECT2_CONTROL, DC_HOT_PLUG_DETECTx_EN);
                                break;
                        case RADEON_HPD_3:
                                WREG32(DC_HOT_PLUG_DETECT3_CONTROL, DC_HOT_PLUG_DETECTx_EN);
                                break;
                        default:
                                break;
                        }
                }
                enable |= 1 << radeon_connector->hpd.hpd;
                radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
        }
        radeon_irq_kms_enable_hpd(rdev, enable);
}

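/* Disable the HPD pins used by the connectors and their interrupts. */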
void r600_hpd_fini(struct radeon_device *rdev)
{
        struct drm_device *dev = rdev->ddev;
        struct drm_connector *connector;
        unsigned disable = 0;

        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct radeon_connector *radeon_connector = to_radeon_connector(connector);
                if (ASIC_IS_DCE3(rdev)) {
                        switch (radeon_connector->hpd.hpd) {
                        case RADEON_HPD_1:
                                WREG32(DC_HPD1_CONTROL, 0);
                                break;
                        case RADEON_HPD_2:
                                WREG32(DC_HPD2_CONTROL, 0);
                                break;
                        case RADEON_HPD_3:
                                WREG32(DC_HPD3_CONTROL, 0);
                                break;
                        case RADEON_HPD_4:
                                WREG32(DC_HPD4_CONTROL, 0);
                                break;
                        /* DCE 3.2 */
                        case RADEON_HPD_5:
                                WREG32(DC_HPD5_CONTROL, 0);
                                break;
                        case RADEON_HPD_6:
                                WREG32(DC_HPD6_CONTROL, 0);
                                break;
                        default:
                                break;
                        }
                } else {
                        switch (radeon_connector->hpd.hpd) {
                        case RADEON_HPD_1:
                                WREG32(DC_HOT_PLUG_DETECT1_CONTROL, 0);
                                break;
                        case RADEON_HPD_2:
                                WREG32(DC_HOT_PLUG_DETECT2_CONTROL, 0);
                                break;
                        case RADEON_HPD_3:
                                WREG32(DC_HOT_PLUG_DETECT3_CONTROL, 0);
                                break;
                        default:
                                break;
                        }
                }
                disable |= 1 << radeon_connector->hpd.hpd;
        }
        radeon_irq_kms_disable_hpd(rdev, disable);
}

/*
 * R600 PCIE GART
 */
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
        unsigned i;
        u32 tmp;

        /* flush hdp cache so updates hit vram */
        if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
            !(rdev->flags & RADEON_IS_AGP)) {
                void __iomem *ptr = (void *)rdev->gart.ptr;
                u32 tmp;

                /* r7xx hw bug.  write to HDP_DEBUG1 followed by an fb read
                 * rather than a write to HDP_REG_COHERENCY_FLUSH_CNTL.
                 * This seems to cause problems on some AGP cards, so the
                 * old method is used for them (see the AGP check above).
                 */
                WREG32(HDP_DEBUG1, 0);
                tmp = readl((void __iomem *)ptr);
        } else
                WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

        WREG32(VM_CONTEXT0_INVALIDATION_LOW_ADDR, rdev->mc.gtt_start >> 12);
        WREG32(VM_CONTEXT0_INVALIDATION_HIGH_ADDR, (rdev->mc.gtt_end - 1) >> 12);
        WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
        for (i = 0; i < rdev->usec_timeout; i++) {
                /* read the invalidation response */
                tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
                tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
                if (tmp == 2) {
                        printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
                        return;
                }
                if (tmp) {
                        return;
                }
                udelay(1);
        }
}

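/* Allocate the common GART structure and the page table in VRAM. */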
int r600_pcie_gart_init(struct radeon_device *rdev)
{
        int r;

        if (rdev->gart.robj) {
                WARN(1, "R600 PCIE GART already initialized\n");
                return 0;
        }
        /* Initialize common gart structure */
        r = radeon_gart_init(rdev);
        if (r)
                return r;
        rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;
        return radeon_gart_table_vram_alloc(rdev);
}

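/* Pin the GART table, program the L2/TLB registers and enable VM context 0. */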
static int r600_pcie_gart_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int r, i;

        if (rdev->gart.robj == NULL) {
                dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
                return -EINVAL;
        }
        r = radeon_gart_table_vram_pin(rdev);
        if (r)
                return r;
        radeon_gart_restore(rdev);

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
               ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
               EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
                ENABLE_WAIT_L2_QUERY;
        WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp | ENABLE_L1_STRICT_ORDERING);
        WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
        WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
        WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
        WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
               RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
        WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
               (u32)(rdev->dummy_page.addr >> 12));
        for (i = 1; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

        r600_pcie_gart_tlb_flush(rdev);
        DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
                 (unsigned)(rdev->mc.gtt_size >> 20),
                 (unsigned long long)rdev->gart.table_addr);
        rdev->gart.ready = true;
        return 0;
}

static void r600_pcie_gart_disable(struct radeon_device *rdev)
{
        u32 tmp;
        int i;

        /* Disable all tables */
        for (i = 0; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

        /* Disable L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
               EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
        /* Setup L1 TLB control */
        tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
                ENABLE_WAIT_L2_QUERY;
        WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
        radeon_gart_table_vram_unpin(rdev);
}

static void r600_pcie_gart_fini(struct radeon_device *rdev)
{
        radeon_gart_fini(rdev);
        r600_pcie_gart_disable(rdev);
        radeon_gart_table_vram_free(rdev);
}

static void r600_agp_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int i;

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
               ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
               EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT_0(0) | BANK_SELECT_1(1));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5) |
                ENABLE_WAIT_L2_QUERY;
        WREG32(MC_VM_L1_TLB_MCB_RD_SYS_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_SYS_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_HDP_CNTL, tmp | ENABLE_L1_STRICT_ORDERING);
        WREG32(MC_VM_L1_TLB_MCB_WR_HDP_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_RD_A_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_WR_A_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_RD_B_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCD_WR_B_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_GFX_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_GFX_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_PDMA_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_WR_PDMA_CNTL, tmp);
        WREG32(MC_VM_L1_TLB_MCB_RD_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
        WREG32(MC_VM_L1_TLB_MCB_WR_SEM_CNTL, tmp | ENABLE_SEMAPHORE_MODE);
        for (i = 0; i < 7; i++)
                WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

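/* Poll SRBM_STATUS until the memory controller reports idle. */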
int r600_mc_wait_for_idle(struct radeon_device *rdev)
{
        unsigned i;
        u32 tmp;

        for (i = 0; i < rdev->usec_timeout; i++) {
                /* read MC busy bits */
                tmp = RREG32(R_000E50_SRBM_STATUS) & 0x3F00;
                if (!tmp)
                        return 0;
                udelay(1);
        }
        return -1;
}

static void r600_mc_program(struct radeon_device *rdev)
{
        struct rv515_mc_save save;
        u32 tmp;
        int i, j;

        /* Initialize HDP */
        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
                WREG32((0x2c14 + j), 0x00000000);
                WREG32((0x2c18 + j), 0x00000000);
                WREG32((0x2c1c + j), 0x00000000);
                WREG32((0x2c20 + j), 0x00000000);
                WREG32((0x2c24 + j), 0x00000000);
        }
        WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

        rv515_mc_stop(rdev, &save);
        if (r600_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        /* Lockout access through VGA aperture (doesn't exist before R600) */
        WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
        /* Update configuration */
        if (rdev->flags & RADEON_IS_AGP) {
                if (rdev->mc.vram_start < rdev->mc.gtt_start) {
                        /* VRAM before AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                               rdev->mc.vram_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                               rdev->mc.gtt_end >> 12);
                } else {
                        /* VRAM after AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                               rdev->mc.gtt_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                               rdev->mc.vram_end >> 12);
                }
        } else {
                WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start >> 12);
                WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end >> 12);
        }
        WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
        tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
        tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
        WREG32(MC_VM_FB_LOCATION, tmp);
        WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
        WREG32(HDP_NONSURFACE_INFO, (2 << 7));
        WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
        if (rdev->flags & RADEON_IS_AGP) {
                WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 22);
                WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 22);
                WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
        } else {
                WREG32(MC_VM_AGP_BASE, 0);
                WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
                WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
        }
        if (r600_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        rv515_mc_resume(rdev, &save);
        /* we need to own VRAM, so turn off the VGA renderer here
         * to stop it overwriting our objects */
        rv515_vga_render_disable(rdev);
}

/**
 * r600_vram_gtt_location - try to find VRAM & GTT location
 * @rdev: radeon device structure holding all necessary information
 * @mc: memory controller structure holding memory information
 *
 * Try to place VRAM at the same place as in the CPU (PCI) address space,
 * as some GPUs have issues when the aperture is reprogrammed to a
 * different address space.
 *
 * If there is not enough space to fit the whole VRAM, limit the VRAM
 * size to what does fit.
 *
 * When using AGP, place VRAM adjacent to the AGP aperture.
 * Note: VRAM start is not explicitly enforced to be aligned on VRAM
 * size; this shouldn't be a problem as this is called after TTM init.
 */
static void r600_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
{
        u64 size_bf, size_af;

        if (mc->mc_vram_size > 0xE0000000) {
                /* leave room for at least 512M GTT */
                dev_warn(rdev->dev, "limiting VRAM\n");
                mc->real_vram_size = 0xE0000000;
                mc->mc_vram_size = 0xE0000000;
        }
        if (rdev->flags & RADEON_IS_AGP) {
                size_bf = mc->gtt_start;
                size_af = 0xFFFFFFFF - mc->gtt_end;
                if (size_bf > size_af) {
                        if (mc->mc_vram_size > size_bf) {
                                dev_warn(rdev->dev, "limiting VRAM\n");
                                mc->real_vram_size = size_bf;
                                mc->mc_vram_size = size_bf;
                        }
                        mc->vram_start = mc->gtt_start - mc->mc_vram_size;
                } else {
                        if (mc->mc_vram_size > size_af) {
                                dev_warn(rdev->dev, "limiting VRAM\n");
                                mc->real_vram_size = size_af;
                                mc->mc_vram_size = size_af;
                        }
                        mc->vram_start = mc->gtt_end + 1;
                }
                mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
                dev_info(rdev->dev, "VRAM: %lluM 0x%08llX - 0x%08llX (%lluM used)\n",
                         mc->mc_vram_size >> 20, mc->vram_start,
                         mc->vram_end, mc->real_vram_size >> 20);
        } else {
                u64 base = 0;
                if (rdev->flags & RADEON_IS_IGP) {
                        base = RREG32(MC_VM_FB_LOCATION) & 0xFFFF;
                        base <<= 24;
                }
                radeon_vram_location(rdev, &rdev->mc, base);
                rdev->mc.gtt_base_align = 0;
                radeon_gtt_location(rdev, mc);
        }
}

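/* Read the VRAM configuration (channel width, size) and place VRAM and GTT. */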
static int r600_mc_init(struct radeon_device *rdev)
{
        u32 tmp;
        int chansize, numchan;

        /* Get VRAM information */
        rdev->mc.vram_is_ddr = true;
        tmp = RREG32(RAMCFG);
        if (tmp & CHANSIZE_OVERRIDE) {
                chansize = 16;
        } else if (tmp & CHANSIZE_MASK) {
                chansize = 64;
        } else {
                chansize = 32;
        }
        tmp = RREG32(CHMAP);
        switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
        case 0:
        default:
                numchan = 1;
                break;
        case 1:
                numchan = 2;
                break;
        case 2:
                numchan = 4;
                break;
        case 3:
                numchan = 8;
                break;
        }
        rdev->mc.vram_width = numchan * chansize;
        /* Could aper size report 0 ? */
        rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
        rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
        /* Setup GPU memory space */
        rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
        rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
        rdev->mc.visible_vram_size = rdev->mc.aper_size;
        r600_vram_gtt_location(rdev, &rdev->mc);

        if (rdev->flags & RADEON_IS_IGP) {
                rs690_pm_info(rdev);
                rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);
        }
        radeon_update_bandwidth_info(rdev);
        return 0;
}

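/*
 * Allocate, pin and map a single-page VRAM scratch buffer; its GPU
 * address is used as the default system aperture address (see
 * r600_mc_program above).
 */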
int r600_vram_scratch_init(struct radeon_device *rdev)
{
        int r;

        if (rdev->vram_scratch.robj == NULL) {
                r = radeon_bo_create(rdev, RADEON_GPU_PAGE_SIZE,
                                     PAGE_SIZE, true, RADEON_GEM_DOMAIN_VRAM,
                                     NULL, &rdev->vram_scratch.robj);
                if (r) {
                        return r;
                }
        }

        r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
        if (unlikely(r != 0))
                return r;
        r = radeon_bo_pin(rdev->vram_scratch.robj,
                          RADEON_GEM_DOMAIN_VRAM, &rdev->vram_scratch.gpu_addr);
        if (r) {
                radeon_bo_unreserve(rdev->vram_scratch.robj);
                return r;
        }
        r = radeon_bo_kmap(rdev->vram_scratch.robj,
                           (void **)&rdev->vram_scratch.ptr);
        if (r)
                radeon_bo_unpin(rdev->vram_scratch.robj);
        radeon_bo_unreserve(rdev->vram_scratch.robj);

        return r;
}

void r600_vram_scratch_fini(struct radeon_device *rdev)
{
        int r;

        if (rdev->vram_scratch.robj == NULL) {
                return;
        }
        r = radeon_bo_reserve(rdev->vram_scratch.robj, false);
        if (likely(r == 0)) {
                radeon_bo_kunmap(rdev->vram_scratch.robj);
                radeon_bo_unpin(rdev->vram_scratch.robj);
                radeon_bo_unreserve(rdev->vram_scratch.robj);
        }
        radeon_bo_unref(&rdev->vram_scratch.robj);
}

/* We don't check whether the GPU really needs a reset; we simply do
 * the reset.  It's up to the caller to determine if the GPU needs one.
 */
static int r600_gpu_soft_reset(struct radeon_device *rdev)
{
        struct rv515_mc_save save;
        u32 grbm_busy_mask = S_008010_VC_BUSY(1) | S_008010_VGT_BUSY_NO_DMA(1) |
                S_008010_VGT_BUSY(1) | S_008010_TA03_BUSY(1) |
                S_008010_TC_BUSY(1) | S_008010_SX_BUSY(1) |
                S_008010_SH_BUSY(1) | S_008010_SPI03_BUSY(1) |
                S_008010_SMX_BUSY(1) | S_008010_SC_BUSY(1) |
                S_008010_PA_BUSY(1) | S_008010_DB03_BUSY(1) |
                S_008010_CR_BUSY(1) | S_008010_CB03_BUSY(1) |
                S_008010_GUI_ACTIVE(1);
        u32 grbm2_busy_mask = S_008014_SPI0_BUSY(1) | S_008014_SPI1_BUSY(1) |
                S_008014_SPI2_BUSY(1) | S_008014_SPI3_BUSY(1) |
                S_008014_TA0_BUSY(1) | S_008014_TA1_BUSY(1) |
                S_008014_TA2_BUSY(1) | S_008014_TA3_BUSY(1) |
                S_008014_DB0_BUSY(1) | S_008014_DB1_BUSY(1) |
                S_008014_DB2_BUSY(1) | S_008014_DB3_BUSY(1) |
                S_008014_CB0_BUSY(1) | S_008014_CB1_BUSY(1) |
                S_008014_CB2_BUSY(1) | S_008014_CB3_BUSY(1);
        u32 tmp;

        if (!(RREG32(GRBM_STATUS) & GUI_ACTIVE))
                return 0;

        dev_info(rdev->dev, "GPU softreset\n");
        dev_info(rdev->dev, "  R_008010_GRBM_STATUS=0x%08X\n",
                 RREG32(R_008010_GRBM_STATUS));
        dev_info(rdev->dev, "  R_008014_GRBM_STATUS2=0x%08X\n",
                 RREG32(R_008014_GRBM_STATUS2));
        dev_info(rdev->dev, "  R_000E50_SRBM_STATUS=0x%08X\n",
                 RREG32(R_000E50_SRBM_STATUS));
        dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
                 RREG32(CP_STALLED_STAT1));
        dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
                 RREG32(CP_STALLED_STAT2));
        dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT = 0x%08X\n",
                 RREG32(CP_BUSY_STAT));
        dev_info(rdev->dev, "  R_008680_CP_STAT = 0x%08X\n",
                 RREG32(CP_STAT));
        rv515_mc_stop(rdev, &save);
        if (r600_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        /* Disable CP parsing/prefetching */
        WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
        /* Check if any of the rendering blocks is busy and reset it */
        if ((RREG32(R_008010_GRBM_STATUS) & grbm_busy_mask) ||
            (RREG32(R_008014_GRBM_STATUS2) & grbm2_busy_mask)) {
                tmp = S_008020_SOFT_RESET_CR(1) |
                        S_008020_SOFT_RESET_DB(1) |
                        S_008020_SOFT_RESET_CB(1) |
                        S_008020_SOFT_RESET_PA(1) |
                        S_008020_SOFT_RESET_SC(1) |
                        S_008020_SOFT_RESET_SMX(1) |
                        S_008020_SOFT_RESET_SPI(1) |
                        S_008020_SOFT_RESET_SX(1) |
                        S_008020_SOFT_RESET_SH(1) |
                        S_008020_SOFT_RESET_TC(1) |
                        S_008020_SOFT_RESET_TA(1) |
                        S_008020_SOFT_RESET_VC(1) |
                        S_008020_SOFT_RESET_VGT(1);
                dev_info(rdev->dev, "  R_008020_GRBM_SOFT_RESET=0x%08X\n", tmp);
                WREG32(R_008020_GRBM_SOFT_RESET, tmp);
                RREG32(R_008020_GRBM_SOFT_RESET);
                mdelay(15);
                WREG32(R_008020_GRBM_SOFT_RESET, 0);
        }
        /* Reset CP (we always reset, otherwise the ring never comes back) */
        tmp = S_008020_SOFT_RESET_CP(1);
        dev_info(rdev->dev, "R_008020_GRBM_SOFT_RESET=0x%08X\n", tmp);
        WREG32(R_008020_GRBM_SOFT_RESET, tmp);
        RREG32(R_008020_GRBM_SOFT_RESET);
        mdelay(15);
        WREG32(R_008020_GRBM_SOFT_RESET, 0);
        /* Wait a little for things to settle down */
        mdelay(1);
        dev_info(rdev->dev, "  R_008010_GRBM_STATUS=0x%08X\n",
                 RREG32(R_008010_GRBM_STATUS));
        dev_info(rdev->dev, "  R_008014_GRBM_STATUS2=0x%08X\n",
                 RREG32(R_008014_GRBM_STATUS2));
        dev_info(rdev->dev, "  R_000E50_SRBM_STATUS=0x%08X\n",
                 RREG32(R_000E50_SRBM_STATUS));
        dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
                 RREG32(CP_STALLED_STAT1));
        dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
                 RREG32(CP_STALLED_STAT2));
        dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT = 0x%08X\n",
                 RREG32(CP_BUSY_STAT));
        dev_info(rdev->dev, "  R_008680_CP_STAT = 0x%08X\n",
                 RREG32(CP_STAT));
        rv515_mc_resume(rdev, &save);
        return 0;
}

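/* Check whether the GPU is locked up by looking at GUI activity and ring progress. */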
bool r600_gpu_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
{
        u32 srbm_status;
        u32 grbm_status;
        u32 grbm_status2;

        srbm_status = RREG32(R_000E50_SRBM_STATUS);
        grbm_status = RREG32(R_008010_GRBM_STATUS);
        grbm_status2 = RREG32(R_008014_GRBM_STATUS2);
        if (!G_008010_GUI_ACTIVE(grbm_status)) {
                radeon_ring_lockup_update(ring);
                return false;
        }
        /* force CP activities */
        radeon_ring_force_activity(rdev, ring);
        return radeon_ring_test_lockup(rdev, ring);
}

int r600_asic_reset(struct radeon_device *rdev)
{
        return r600_gpu_soft_reset(rdev);
}

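/*
 * Build the backend map that distributes the enabled render backends
 * across the rendering pipes, skipping backends in disabled_rb_mask.
 */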
u32 r6xx_remap_render_backend(struct radeon_device *rdev,
                              u32 tiling_pipe_num,
                              u32 max_rb_num,
                              u32 total_max_rb_num,
                              u32 disabled_rb_mask)
{
        u32 rendering_pipe_num, rb_num_width, req_rb_num;
        u32 pipe_rb_ratio, pipe_rb_remain;
        u32 data = 0, mask = 1 << (max_rb_num - 1);
        unsigned i, j;

        /* mask out the RBs that don't exist on that asic */
        disabled_rb_mask |= (0xff << max_rb_num) & 0xff;

        rendering_pipe_num = 1 << tiling_pipe_num;
        req_rb_num = total_max_rb_num - r600_count_pipe_bits(disabled_rb_mask);
        BUG_ON(rendering_pipe_num < req_rb_num);

        pipe_rb_ratio = rendering_pipe_num / req_rb_num;
        pipe_rb_remain = rendering_pipe_num - pipe_rb_ratio * req_rb_num;

        if (rdev->family <= CHIP_RV740) {
                /* r6xx/r7xx */
                rb_num_width = 2;
        } else {
                /* eg+ */
                rb_num_width = 4;
        }

        for (i = 0; i < max_rb_num; i++) {
                if (!(mask & disabled_rb_mask)) {
                        for (j = 0; j < pipe_rb_ratio; j++) {
                                data <<= rb_num_width;
                                data |= max_rb_num - i - 1;
                        }
                        if (pipe_rb_remain) {
                                data <<= rb_num_width;
                                data |= max_rb_num - i - 1;
                                pipe_rb_remain--;
                        }
                }
                mask >>= 1;
        }

        return data;
}

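/* Count the number of set bits (population count) in val. */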
int r600_count_pipe_bits(uint32_t val)
{
        int i, ret = 0;

        for (i = 0; i < 32; i++) {
                ret += val & 1;
                val >>= 1;
        }
        return ret;
}

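/*
 * Program the per-family GPU configuration: pipe/backend limits,
 * tiling, the render backend map, and SQ/CP default state.
 */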
1436static void r600_gpu_init(struct radeon_device *rdev)
1437{
1438 u32 tiling_config;
1439 u32 ramcfg;
1440 u32 cc_rb_backend_disable;
1441 u32 cc_gc_shader_pipe_config;
1442 u32 tmp;
1443 int i, j;
1444 u32 sq_config;
1445 u32 sq_gpr_resource_mgmt_1 = 0;
1446 u32 sq_gpr_resource_mgmt_2 = 0;
1447 u32 sq_thread_resource_mgmt = 0;
1448 u32 sq_stack_resource_mgmt_1 = 0;
1449 u32 sq_stack_resource_mgmt_2 = 0;
1450 u32 disabled_rb_mask;
1451
1452 rdev->config.r600.tiling_group_size = 256;
1453 switch (rdev->family) {
1454 case CHIP_R600:
1455 rdev->config.r600.max_pipes = 4;
1456 rdev->config.r600.max_tile_pipes = 8;
1457 rdev->config.r600.max_simds = 4;
1458 rdev->config.r600.max_backends = 4;
1459 rdev->config.r600.max_gprs = 256;
1460 rdev->config.r600.max_threads = 192;
1461 rdev->config.r600.max_stack_entries = 256;
1462 rdev->config.r600.max_hw_contexts = 8;
1463 rdev->config.r600.max_gs_threads = 16;
1464 rdev->config.r600.sx_max_export_size = 128;
1465 rdev->config.r600.sx_max_export_pos_size = 16;
1466 rdev->config.r600.sx_max_export_smx_size = 128;
1467 rdev->config.r600.sq_num_cf_insts = 2;
1468 break;
1469 case CHIP_RV630:
1470 case CHIP_RV635:
1471 rdev->config.r600.max_pipes = 2;
1472 rdev->config.r600.max_tile_pipes = 2;
1473 rdev->config.r600.max_simds = 3;
1474 rdev->config.r600.max_backends = 1;
1475 rdev->config.r600.max_gprs = 128;
1476 rdev->config.r600.max_threads = 192;
1477 rdev->config.r600.max_stack_entries = 128;
1478 rdev->config.r600.max_hw_contexts = 8;
1479 rdev->config.r600.max_gs_threads = 4;
1480 rdev->config.r600.sx_max_export_size = 128;
1481 rdev->config.r600.sx_max_export_pos_size = 16;
1482 rdev->config.r600.sx_max_export_smx_size = 128;
1483 rdev->config.r600.sq_num_cf_insts = 2;
1484 break;
1485 case CHIP_RV610:
1486 case CHIP_RV620:
1487 case CHIP_RS780:
1488 case CHIP_RS880:
1489 rdev->config.r600.max_pipes = 1;
1490 rdev->config.r600.max_tile_pipes = 1;
1491 rdev->config.r600.max_simds = 2;
1492 rdev->config.r600.max_backends = 1;
1493 rdev->config.r600.max_gprs = 128;
1494 rdev->config.r600.max_threads = 192;
1495 rdev->config.r600.max_stack_entries = 128;
1496 rdev->config.r600.max_hw_contexts = 4;
1497 rdev->config.r600.max_gs_threads = 4;
1498 rdev->config.r600.sx_max_export_size = 128;
1499 rdev->config.r600.sx_max_export_pos_size = 16;
1500 rdev->config.r600.sx_max_export_smx_size = 128;
1501 rdev->config.r600.sq_num_cf_insts = 1;
1502 break;
1503 case CHIP_RV670:
1504 rdev->config.r600.max_pipes = 4;
1505 rdev->config.r600.max_tile_pipes = 4;
1506 rdev->config.r600.max_simds = 4;
1507 rdev->config.r600.max_backends = 4;
1508 rdev->config.r600.max_gprs = 192;
1509 rdev->config.r600.max_threads = 192;
1510 rdev->config.r600.max_stack_entries = 256;
1511 rdev->config.r600.max_hw_contexts = 8;
1512 rdev->config.r600.max_gs_threads = 16;
1513 rdev->config.r600.sx_max_export_size = 128;
1514 rdev->config.r600.sx_max_export_pos_size = 16;
1515 rdev->config.r600.sx_max_export_smx_size = 128;
1516 rdev->config.r600.sq_num_cf_insts = 2;
1517 break;
1518 default:
1519 break;
1520 }
1521
1522
1523 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
1524 WREG32((0x2c14 + j), 0x00000000);
1525 WREG32((0x2c18 + j), 0x00000000);
1526 WREG32((0x2c1c + j), 0x00000000);
1527 WREG32((0x2c20 + j), 0x00000000);
1528 WREG32((0x2c24 + j), 0x00000000);
1529 }
1530
1531 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
1532
1533
1534 tiling_config = 0;
1535 ramcfg = RREG32(RAMCFG);
1536 switch (rdev->config.r600.max_tile_pipes) {
1537 case 1:
1538 tiling_config |= PIPE_TILING(0);
1539 break;
1540 case 2:
1541 tiling_config |= PIPE_TILING(1);
1542 break;
1543 case 4:
1544 tiling_config |= PIPE_TILING(2);
1545 break;
1546 case 8:
1547 tiling_config |= PIPE_TILING(3);
1548 break;
1549 default:
1550 break;
1551 }
1552 rdev->config.r600.tiling_npipes = rdev->config.r600.max_tile_pipes;
1553 rdev->config.r600.tiling_nbanks = 4 << ((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
1554 tiling_config |= BANK_TILING((ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT);
1555 tiling_config |= GROUP_SIZE((ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
1556
1557 tmp = (ramcfg & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
1558 if (tmp > 3) {
1559 tiling_config |= ROW_TILING(3);
1560 tiling_config |= SAMPLE_SPLIT(3);
1561 } else {
1562 tiling_config |= ROW_TILING(tmp);
1563 tiling_config |= SAMPLE_SPLIT(tmp);
1564 }
1565 tiling_config |= BANK_SWAPS(1);
1566
1567 cc_rb_backend_disable = RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000;
1568 tmp = R6XX_MAX_BACKENDS -
1569 r600_count_pipe_bits((cc_rb_backend_disable >> 16) & R6XX_MAX_BACKENDS_MASK);
1570 if (tmp < rdev->config.r600.max_backends) {
1571 rdev->config.r600.max_backends = tmp;
1572 }
1573
1574 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0x00ffff00;
1575 tmp = R6XX_MAX_PIPES -
1576 r600_count_pipe_bits((cc_gc_shader_pipe_config >> 8) & R6XX_MAX_PIPES_MASK);
1577 if (tmp < rdev->config.r600.max_pipes) {
1578 rdev->config.r600.max_pipes = tmp;
1579 }
1580 tmp = R6XX_MAX_SIMDS -
1581 r600_count_pipe_bits((cc_gc_shader_pipe_config >> 16) & R6XX_MAX_SIMDS_MASK);
1582 if (tmp < rdev->config.r600.max_simds) {
1583 rdev->config.r600.max_simds = tmp;
1584 }
1585
1586 disabled_rb_mask = (RREG32(CC_RB_BACKEND_DISABLE) >> 16) & R6XX_MAX_BACKENDS_MASK;
1587 tmp = (tiling_config & PIPE_TILING__MASK) >> PIPE_TILING__SHIFT;
1588 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.r600.max_backends,
1589 R6XX_MAX_BACKENDS, disabled_rb_mask);
1590 tiling_config |= tmp << 16;
1591 rdev->config.r600.backend_map = tmp;
1592
1593 rdev->config.r600.tile_config = tiling_config;
1594 WREG32(GB_TILING_CONFIG, tiling_config);
1595 WREG32(DCP_TILING_CONFIG, tiling_config & 0xffff);
1596 WREG32(HDP_TILING_CONFIG, tiling_config & 0xffff);
1597
1598 tmp = R6XX_MAX_PIPES - r600_count_pipe_bits((cc_gc_shader_pipe_config & INACTIVE_QD_PIPES_MASK) >> 8);
1599 WREG32(VGT_OUT_DEALLOC_CNTL, (tmp * 4) & DEALLOC_DIST_MASK);
1600 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, ((tmp * 4) - 2) & VTX_REUSE_DEPTH_MASK);
1601
1602
1603 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) | ROQ_IB2_START(0x2b)));
1604 WREG32(CP_MEQ_THRESHOLDS, (MEQ_END(0x40) | ROQ_END(0x40)));
1605
1606 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO | SYNC_GRADIENT |
1607 SYNC_WALKER | SYNC_ALIGNER));
1608
1609 if (rdev->family == CHIP_RV670)
1610 WREG32(ARB_GDEC_RD_CNTL, 0x00000021);
1611
1612 tmp = RREG32(SX_DEBUG_1);
1613 tmp |= SMX_EVENT_RELEASE;
1614 if ((rdev->family > CHIP_R600))
1615 tmp |= ENABLE_NEW_SMX_ADDRESS;
1616 WREG32(SX_DEBUG_1, tmp);
1617
1618 if (((rdev->family) == CHIP_R600) ||
1619 ((rdev->family) == CHIP_RV630) ||
1620 ((rdev->family) == CHIP_RV610) ||
1621 ((rdev->family) == CHIP_RV620) ||
1622 ((rdev->family) == CHIP_RS780) ||
1623 ((rdev->family) == CHIP_RS880)) {
1624 WREG32(DB_DEBUG, PREZ_MUST_WAIT_FOR_POSTZ_DONE);
1625 } else {
1626 WREG32(DB_DEBUG, 0);
1627 }
1628 WREG32(DB_WATERMARKS, (DEPTH_FREE(4) | DEPTH_CACHELINE_FREE(16) |
1629 DEPTH_FLUSH(16) | DEPTH_PENDING_FREE(4)));
1630
1631 WREG32(PA_SC_MULTI_CHIP_CNTL, 0);
1632 WREG32(VGT_NUM_INSTANCES, 0);
1633
1634 WREG32(SPI_CONFIG_CNTL, GPR_WRITE_PRIORITY(0));
1635 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(0));
1636
1637 tmp = RREG32(SQ_MS_FIFO_SIZES);
1638 if (((rdev->family) == CHIP_RV610) ||
1639 ((rdev->family) == CHIP_RV620) ||
1640 ((rdev->family) == CHIP_RS780) ||
1641 ((rdev->family) == CHIP_RS880)) {
1642 tmp = (CACHE_FIFO_SIZE(0xa) |
1643 FETCH_FIFO_HIWATER(0xa) |
1644 DONE_FIFO_HIWATER(0xe0) |
1645 ALU_UPDATE_FIFO_HIWATER(0x8));
1646 } else if (((rdev->family) == CHIP_R600) ||
1647 ((rdev->family) == CHIP_RV630)) {
1648 tmp &= ~DONE_FIFO_HIWATER(0xff);
1649 tmp |= DONE_FIFO_HIWATER(0x4);
1650 }
1651 WREG32(SQ_MS_FIFO_SIZES, tmp);
1652
1653
1654
1655
1656 sq_config = RREG32(SQ_CONFIG);
1657 sq_config &= ~(PS_PRIO(3) |
1658 VS_PRIO(3) |
1659 GS_PRIO(3) |
1660 ES_PRIO(3));
1661 sq_config |= (DX9_CONSTS |
1662 VC_ENABLE |
1663 PS_PRIO(0) |
1664 VS_PRIO(1) |
1665 GS_PRIO(2) |
1666 ES_PRIO(3));
1667
1668 if ((rdev->family) == CHIP_R600) {
1669 sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(124) |
1670 NUM_VS_GPRS(124) |
1671 NUM_CLAUSE_TEMP_GPRS(4));
1672 sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(0) |
1673 NUM_ES_GPRS(0));
1674 sq_thread_resource_mgmt = (NUM_PS_THREADS(136) |
1675 NUM_VS_THREADS(48) |
1676 NUM_GS_THREADS(4) |
1677 NUM_ES_THREADS(4));
1678 sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(128) |
1679 NUM_VS_STACK_ENTRIES(128));
1680 sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(0) |
1681 NUM_ES_STACK_ENTRIES(0));
1682 } else if (((rdev->family) == CHIP_RV610) ||
1683 ((rdev->family) == CHIP_RV620) ||
1684 ((rdev->family) == CHIP_RS780) ||
1685 ((rdev->family) == CHIP_RS880)) {
1686
		sq_config &= ~VC_ENABLE;

		sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
					  NUM_VS_GPRS(44) |
					  NUM_CLAUSE_TEMP_GPRS(2));
		sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(17) |
					  NUM_ES_GPRS(17));
		sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
					   NUM_VS_THREADS(78) |
					   NUM_GS_THREADS(4) |
					   NUM_ES_THREADS(31));
		sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(40) |
					    NUM_VS_STACK_ENTRIES(40));
		sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(32) |
					    NUM_ES_STACK_ENTRIES(16));
	} else if ((rdev->family == CHIP_RV630) ||
		   (rdev->family == CHIP_RV635)) {
		sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
					  NUM_VS_GPRS(44) |
					  NUM_CLAUSE_TEMP_GPRS(2));
		sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(18) |
					  NUM_ES_GPRS(18));
		sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
					   NUM_VS_THREADS(78) |
					   NUM_GS_THREADS(4) |
					   NUM_ES_THREADS(31));
		sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(40) |
					    NUM_VS_STACK_ENTRIES(40));
		sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(32) |
					    NUM_ES_STACK_ENTRIES(16));
	} else if (rdev->family == CHIP_RV670) {
		sq_gpr_resource_mgmt_1 = (NUM_PS_GPRS(44) |
					  NUM_VS_GPRS(44) |
					  NUM_CLAUSE_TEMP_GPRS(2));
		sq_gpr_resource_mgmt_2 = (NUM_GS_GPRS(17) |
					  NUM_ES_GPRS(17));
		sq_thread_resource_mgmt = (NUM_PS_THREADS(79) |
					   NUM_VS_THREADS(78) |
					   NUM_GS_THREADS(4) |
					   NUM_ES_THREADS(31));
		sq_stack_resource_mgmt_1 = (NUM_PS_STACK_ENTRIES(64) |
					    NUM_VS_STACK_ENTRIES(64));
		sq_stack_resource_mgmt_2 = (NUM_GS_STACK_ENTRIES(64) |
					    NUM_ES_STACK_ENTRIES(64));
	}

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);

	if ((rdev->family == CHIP_RV610) ||
	    (rdev->family == CHIP_RV620) ||
	    (rdev->family == CHIP_RS780) ||
	    (rdev->family == CHIP_RS880)) {
		WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(TC_ONLY));
	} else {
		WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(VC_AND_TC));
	}

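	/* default AA sample locations; 2D/3D drivers may adjust as needed */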
	WREG32(PA_SC_AA_SAMPLE_LOCS_2S, (S0_X(0xc) | S0_Y(0x4) |
					 S1_X(0x4) | S1_Y(0xc)));
	WREG32(PA_SC_AA_SAMPLE_LOCS_4S, (S0_X(0xe) | S0_Y(0xe) |
					 S1_X(0x2) | S1_Y(0x2) |
					 S2_X(0xa) | S2_Y(0x6) |
					 S3_X(0x6) | S3_Y(0xa)));
	WREG32(PA_SC_AA_SAMPLE_LOCS_8S_WD0, (S0_X(0xe) | S0_Y(0xb) |
					     S1_X(0x4) | S1_Y(0xc) |
					     S2_X(0x1) | S2_Y(0x6) |
					     S3_X(0xa) | S3_Y(0xe)));
	WREG32(PA_SC_AA_SAMPLE_LOCS_8S_WD1, (S4_X(0x6) | S4_Y(0x1) |
					     S5_X(0x0) | S5_Y(0x0) |
					     S6_X(0xb) | S6_Y(0x4) |
					     S7_X(0x7) | S7_Y(0x8)));

	WREG32(VGT_STRMOUT_EN, 0);
	tmp = rdev->config.r600.max_pipes * 16;
	switch (rdev->family) {
	case CHIP_RV610:
	case CHIP_RV620:
	case CHIP_RS780:
	case CHIP_RS880:
		tmp += 32;
		break;
	case CHIP_RV670:
		tmp += 128;
		break;
	default:
		break;
	}
	if (tmp > 256)
		tmp = 256;
	WREG32(VGT_ES_PER_GS, 128);
	WREG32(VGT_GS_PER_ES, tmp);
	WREG32(VGT_GS_PER_VS, 2);
	WREG32(VGT_GS_VERTEX_REUSE, 16);

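	/* more default values; 2D/3D drivers should adjust as needed */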
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
	WREG32(VGT_STRMOUT_EN, 0);
	WREG32(SX_MISC, 0);
	WREG32(PA_SC_MODE_CNTL, 0);
	WREG32(PA_SC_AA_CONFIG, 0);
	WREG32(PA_SC_LINE_STIPPLE, 0);
	WREG32(SPI_INPUT_Z, 0);
	WREG32(SPI_PS_IN_CONTROL_0, NUM_INTERP(2));
	WREG32(CB_COLOR7_FRAG, 0);

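	/* clear the render target base addresses */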
	WREG32(CB_COLOR0_BASE, 0);
	WREG32(CB_COLOR1_BASE, 0);
	WREG32(CB_COLOR2_BASE, 0);
	WREG32(CB_COLOR3_BASE, 0);
	WREG32(CB_COLOR4_BASE, 0);
	WREG32(CB_COLOR5_BASE, 0);
	WREG32(CB_COLOR6_BASE, 0);
	WREG32(CB_COLOR7_BASE, 0);
	WREG32(CB_COLOR7_FRAG, 0);

	switch (rdev->family) {
	case CHIP_RV610:
	case CHIP_RV620:
	case CHIP_RS780:
	case CHIP_RS880:
		tmp = TC_L2_SIZE(8);
		break;
	case CHIP_RV630:
	case CHIP_RV635:
		tmp = TC_L2_SIZE(4);
		break;
	case CHIP_R600:
		tmp = TC_L2_SIZE(0) | L2_DISABLE_LATE_HIT;
		break;
	default:
		tmp = TC_L2_SIZE(0);
		break;
	}
	WREG32(TC_CNTL, tmp);

	tmp = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, tmp);

	tmp = RREG32(ARB_POP);
	tmp |= ENABLE_TC128;
	WREG32(ARB_POP, tmp);

	WREG32(PA_SC_MULTI_CHIP_CNTL, 0);
	WREG32(PA_CL_ENHANCE, (CLIP_VTX_REORDER_ENA |
			       NUM_CLIP_SEQ(3)));
	WREG32(PA_SC_ENHANCE, FORCE_EOV_MAX_CLK_CNT(4095));
	WREG32(VC_ENHANCE, 0);
}
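/*
 * Indirect registers accessor
 */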
u32 r600_pciep_rreg(struct radeon_device *rdev, u32 reg)
{
	u32 r;

	WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
	(void)RREG32(PCIE_PORT_INDEX);
	r = RREG32(PCIE_PORT_DATA);
	return r;
}

void r600_pciep_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
	(void)RREG32(PCIE_PORT_INDEX);
	WREG32(PCIE_PORT_DATA, (v));
	(void)RREG32(PCIE_PORT_DATA);
}
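/*
 * CP & Ring
 */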
void r600_cp_stop(struct radeon_device *rdev)
{
	radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
	WREG32(R_0086D8_CP_ME_CNTL, S_0086D8_CP_ME_HALT(1));
	WREG32(SCRATCH_UMSK, 0);
}

int r600_init_microcode(struct radeon_device *rdev)
{
	struct platform_device *pdev;
	const char *chip_name;
	const char *rlc_chip_name;
	size_t pfp_req_size, me_req_size, rlc_req_size;
	char fw_name[30];
	int err;

	DRM_DEBUG("\n");

	pdev = platform_device_register_simple("radeon_cp", 0, NULL, 0);
	err = IS_ERR(pdev);
	if (err) {
		printk(KERN_ERR "radeon_cp: Failed to register firmware\n");
		return -EINVAL;
	}

	switch (rdev->family) {
	case CHIP_R600:
		chip_name = "R600";
		rlc_chip_name = "R600";
		break;
	case CHIP_RV610:
		chip_name = "RV610";
		rlc_chip_name = "R600";
		break;
	case CHIP_RV630:
		chip_name = "RV630";
		rlc_chip_name = "R600";
		break;
	case CHIP_RV620:
		chip_name = "RV620";
		rlc_chip_name = "R600";
		break;
	case CHIP_RV635:
		chip_name = "RV635";
		rlc_chip_name = "R600";
		break;
	case CHIP_RV670:
		chip_name = "RV670";
		rlc_chip_name = "R600";
		break;
	case CHIP_RS780:
	case CHIP_RS880:
		chip_name = "RS780";
		rlc_chip_name = "R600";
		break;
	case CHIP_RV770:
		chip_name = "RV770";
		rlc_chip_name = "R700";
		break;
	case CHIP_RV730:
	case CHIP_RV740:
		chip_name = "RV730";
		rlc_chip_name = "R700";
		break;
	case CHIP_RV710:
		chip_name = "RV710";
		rlc_chip_name = "R700";
		break;
	case CHIP_CEDAR:
		chip_name = "CEDAR";
		rlc_chip_name = "CEDAR";
		break;
	case CHIP_REDWOOD:
		chip_name = "REDWOOD";
		rlc_chip_name = "REDWOOD";
		break;
	case CHIP_JUNIPER:
		chip_name = "JUNIPER";
		rlc_chip_name = "JUNIPER";
		break;
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		chip_name = "CYPRESS";
		rlc_chip_name = "CYPRESS";
		break;
	case CHIP_PALM:
		chip_name = "PALM";
		rlc_chip_name = "SUMO";
		break;
	case CHIP_SUMO:
		chip_name = "SUMO";
		rlc_chip_name = "SUMO";
		break;
	case CHIP_SUMO2:
		chip_name = "SUMO2";
		rlc_chip_name = "SUMO";
		break;
	default:
		BUG();
	}

	if (rdev->family >= CHIP_CEDAR) {
		pfp_req_size = EVERGREEN_PFP_UCODE_SIZE * 4;
		me_req_size = EVERGREEN_PM4_UCODE_SIZE * 4;
		rlc_req_size = EVERGREEN_RLC_UCODE_SIZE * 4;
	} else if (rdev->family >= CHIP_RV770) {
		pfp_req_size = R700_PFP_UCODE_SIZE * 4;
		me_req_size = R700_PM4_UCODE_SIZE * 4;
		rlc_req_size = R700_RLC_UCODE_SIZE * 4;
	} else {
		pfp_req_size = PFP_UCODE_SIZE * 4;
		me_req_size = PM4_UCODE_SIZE * 12;
		rlc_req_size = RLC_UCODE_SIZE * 4;
	}

	DRM_INFO("Loading %s Microcode\n", chip_name);

	snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
	err = request_firmware(&rdev->pfp_fw, fw_name, &pdev->dev);
	if (err)
		goto out;
	if (rdev->pfp_fw->size != pfp_req_size) {
		printk(KERN_ERR
		       "r600_cp: Bogus length %zu in firmware \"%s\"\n",
		       rdev->pfp_fw->size, fw_name);
		err = -EINVAL;
		goto out;
	}

	snprintf(fw_name, sizeof(fw_name), "radeon/%s_me.bin", chip_name);
	err = request_firmware(&rdev->me_fw, fw_name, &pdev->dev);
	if (err)
		goto out;
	if (rdev->me_fw->size != me_req_size) {
		printk(KERN_ERR
		       "r600_cp: Bogus length %zu in firmware \"%s\"\n",
		       rdev->me_fw->size, fw_name);
		err = -EINVAL;
	}

	snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
	err = request_firmware(&rdev->rlc_fw, fw_name, &pdev->dev);
	if (err)
		goto out;
	if (rdev->rlc_fw->size != rlc_req_size) {
		printk(KERN_ERR
		       "r600_rlc: Bogus length %zu in firmware \"%s\"\n",
		       rdev->rlc_fw->size, fw_name);
		err = -EINVAL;
	}

out:
	platform_device_unregister(pdev);

	if (err) {
		if (err != -EINVAL)
			printk(KERN_ERR
			       "r600_cp: Failed to load firmware \"%s\"\n",
			       fw_name);
		release_firmware(rdev->pfp_fw);
		rdev->pfp_fw = NULL;
		release_firmware(rdev->me_fw);
		rdev->me_fw = NULL;
		release_firmware(rdev->rlc_fw);
		rdev->rlc_fw = NULL;
	}
	return err;
}

static int r600_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r600_cp_stop(rdev);

	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

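	/* Reset cp */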
	WREG32(GRBM_SOFT_RESET, SOFT_RESET_CP);
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);

	WREG32(CP_ME_RAM_WADDR, 0);

	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < PM4_UCODE_SIZE * 3; i++)
		WREG32(CP_ME_RAM_DATA,
		       be32_to_cpup(fw_data++));

	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA,
		       be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}

int r600_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	if (rdev->family >= CHIP_RV770) {
		radeon_ring_write(ring, 0x0);
		radeon_ring_write(ring, rdev->config.rv770.max_hw_contexts - 1);
	} else {
		radeon_ring_write(ring, 0x3);
		radeon_ring_write(ring, rdev->config.r600.max_hw_contexts - 1);
	}
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	cp_me = 0xff;
	WREG32(R_0086D8_CP_ME_CNTL, cp_me);
	return 0;
}

int r600_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

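	/* Reset cp */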
	WREG32(GRBM_SOFT_RESET, SOFT_RESET_CP);
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);

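	/* Set ring buffer size */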
	rb_bufsz = drm_order(ring->ring_size / 8);
	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);

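	/* Set the write pointer delay */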
	WREG32(CP_RB_WPTR_DELAY, 0);

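	/* Initialize the ring buffer's read and write pointers */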
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

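	/* set the wb address whether it's enabled or not */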
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	ring->rptr = RREG32(CP_RB_RPTR);

	r600_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}

void r600_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size)
{
	u32 rb_bufsz;
	int r;

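	/* Align ring size */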
	rb_bufsz = drm_order(ring_size / 8);
	ring_size = (1 << (rb_bufsz + 1)) * 4;
	ring->ring_size = ring_size;
	ring->align_mask = 16 - 1;

	if (radeon_ring_supports_scratch_reg(rdev, ring)) {
		r = radeon_scratch_get(rdev, &ring->rptr_save_reg);
		if (r) {
			DRM_ERROR("failed to get scratch reg for rptr save (%d).\n", r);
			ring->rptr_save_reg = 0;
		}
	}
}

void r600_cp_fini(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r600_cp_stop(rdev);
	radeon_ring_fini(rdev, ring);
	radeon_scratch_free(rdev, ring->rptr_save_reg);
}
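/*
 * GPU scratch registers helpers function.
 */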
void r600_scratch_init(struct radeon_device *rdev)
{
	int i;

	rdev->scratch.num_reg = 7;
	rdev->scratch.reg_base = SCRATCH_REG0;
	for (i = 0; i < rdev->scratch.num_reg; i++) {
		rdev->scratch.free[i] = true;
		rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
	}
}

int r600_ring_test(struct radeon_device *rdev, struct radeon_ring *ring)
{
	uint32_t scratch;
	uint32_t tmp = 0;
	unsigned i;
	int r;

	r = radeon_scratch_get(rdev, &scratch);
	if (r) {
		DRM_ERROR("radeon: cp failed to get scratch reg (%d).\n", r);
		return r;
	}
	WREG32(scratch, 0xCAFEDEAD);
	r = radeon_ring_lock(rdev, ring, 3);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring %d (%d).\n", ring->idx, r);
		radeon_scratch_free(rdev, scratch);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
	radeon_ring_write(ring, ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
	radeon_ring_write(ring, 0xDEADBEEF);
	radeon_ring_unlock_commit(rdev, ring);
	for (i = 0; i < rdev->usec_timeout; i++) {
		tmp = RREG32(scratch);
		if (tmp == 0xDEADBEEF)
			break;
		DRM_UDELAY(1);
	}
	if (i < rdev->usec_timeout) {
		DRM_INFO("ring test on %d succeeded in %d usecs\n", ring->idx, i);
	} else {
		DRM_ERROR("radeon: ring %d test failed (scratch(0x%04X)=0x%08X)\n",
			  ring->idx, scratch, tmp);
		r = -EINVAL;
	}
	radeon_scratch_free(rdev, scratch);
	return r;
}

void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence)
{
	struct radeon_ring *ring = &rdev->ring[fence->ring];

	if (rdev->wb.use_event) {
		u64 addr = rdev->fence_drv[fence->ring].gpu_addr;

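		/* flush read cache over gart */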
		radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
		radeon_ring_write(ring, PACKET3_TC_ACTION_ENA |
					PACKET3_VC_ACTION_ENA |
					PACKET3_SH_ACTION_ENA);
		radeon_ring_write(ring, 0xFFFFFFFF);
		radeon_ring_write(ring, 0);
		radeon_ring_write(ring, 10);
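		/* EVENT_WRITE_EOP - flush caches, send int */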
		radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
		radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT_TS) | EVENT_INDEX(5));
		radeon_ring_write(ring, addr & 0xffffffff);
		radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
		radeon_ring_write(ring, fence->seq);
		radeon_ring_write(ring, 0);
	} else {
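		/* flush read cache over gart */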
		radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
		radeon_ring_write(ring, PACKET3_TC_ACTION_ENA |
					PACKET3_VC_ACTION_ENA |
					PACKET3_SH_ACTION_ENA);
		radeon_ring_write(ring, 0xFFFFFFFF);
		radeon_ring_write(ring, 0);
		radeon_ring_write(ring, 10);
		radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE, 0));
		radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_EVENT) | EVENT_INDEX(0));
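		/* wait for 3D idle clean */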
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, (WAIT_UNTIL - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
		radeon_ring_write(ring, WAIT_3D_IDLE_bit | WAIT_3D_IDLECLEAN_bit);
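		/* Emit fence sequence & fire IRQ */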
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
		radeon_ring_write(ring, fence->seq);
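		/* CP_INTERRUPT packet 3 no longer exists, use packet 0 */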
		radeon_ring_write(ring, PACKET0(CP_INT_STATUS, 0));
		radeon_ring_write(ring, RB_INT_STAT);
	}
}

void r600_semaphore_ring_emit(struct radeon_device *rdev,
			      struct radeon_ring *ring,
			      struct radeon_semaphore *semaphore,
			      bool emit_wait)
{
	uint64_t addr = semaphore->gpu_addr;
	unsigned sel = emit_wait ? PACKET3_SEM_SEL_WAIT : PACKET3_SEM_SEL_SIGNAL;

	if (rdev->family < CHIP_CAYMAN)
		sel |= PACKET3_SEM_WAIT_ON_SIGNAL;

	radeon_ring_write(ring, PACKET3(PACKET3_MEM_SEMAPHORE, 1));
	radeon_ring_write(ring, addr & 0xffffffff);
	radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | sel);
}

int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_gpu_pages,
		   struct radeon_fence **fence)
{
	struct radeon_semaphore *sem = NULL;
	struct radeon_sa_bo *vb = NULL;
	int r;

	r = r600_blit_prepare_copy(rdev, num_gpu_pages, fence, &vb, &sem);
	if (r) {
		return r;
	}
	r600_kms_blit_copy(rdev, src_offset, dst_offset, num_gpu_pages, vb);
	r600_blit_done_copy(rdev, fence, vb, sem);
	return 0;
}

int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size)
{
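	/* FIXME: implement */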
	return 0;
}

void r600_clear_surface_reg(struct radeon_device *rdev, int reg)
{
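	/* FIXME: implement */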
}

static int r600_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r;

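	/* enable pcie gen2 link */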
	r600_pcie_gen2_enable(rdev);

	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
		r = r600_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}

	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	r600_mc_program(rdev);
	if (rdev->flags & RADEON_IS_AGP) {
		r600_agp_enable(rdev);
	} else {
		r = r600_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	r600_gpu_init(rdev);
	r = r600_blit_init(rdev);
	if (r) {
		r600_blit_fini(rdev);
		rdev->asic->copy.copy = NULL;
		dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
	}

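	/* allocate wb buffer */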
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

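	/* Enable IRQ */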
	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	r600_irq_set(rdev);

	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
			     0, 0xfffff, RADEON_CP_PACKET2);
	if (r)
		return r;
	r = r600_cp_load_microcode(rdev);
	if (r)
		return r;
	r = r600_cp_resume(rdev);
	if (r)
		return r;

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}

void r600_vga_set_state(struct radeon_device *rdev, bool state)
{
	uint32_t temp;

	temp = RREG32(CONFIG_CNTL);
	if (state == false) {
		temp &= ~(1<<0);
		temp |= (1<<1);
	} else {
		temp &= ~(1<<1);
	}
	WREG32(CONFIG_CNTL, temp);
}

int r600_resume(struct radeon_device *rdev)
{
	int r;

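	/* Do not reset GPU before posting, on r600 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */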
	atom_asic_init(rdev->mode_info.atom_context);

	rdev->accel_working = true;
	r = r600_startup(rdev);
	if (r) {
		DRM_ERROR("r600 startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;
}

int r600_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r600_cp_stop(rdev);
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
	r600_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	r600_pcie_gart_disable(rdev);

	return 0;
}
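/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 */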
int r600_init(struct radeon_device *rdev)
{
	int r;

	if (r600_debugfs_mc_info_init(rdev)) {
		DRM_ERROR("Failed to register debugfs file for mc !\n");
	}

	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}

	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;

	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}

	r600_scratch_init(rdev);

	radeon_surface_init(rdev);

	radeon_get_clock_info(rdev->ddev);

	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	r = r600_mc_init(rdev);
	if (r)
		return r;

	r = radeon_bo_init(rdev);
	if (r)
		return r;

	r = radeon_irq_kms_init(rdev);
	if (r)
		return r;

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = r600_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r600_cp_fini(rdev);
		r600_irq_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		r600_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	return 0;
}

void r600_fini(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r600_blit_fini(rdev);
	r600_cp_fini(rdev);
	r600_irq_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	r600_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
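/*
 * CS stuff
 */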
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	if (ring->rptr_save_reg) {
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_OFFSET) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}

int r600_ib_test(struct radeon_device *rdev, struct radeon_ring *ring)
{
	struct radeon_ib ib;
	uint32_t scratch;
	uint32_t tmp = 0;
	unsigned i;
	int r;

	r = radeon_scratch_get(rdev, &scratch);
	if (r) {
		DRM_ERROR("radeon: failed to get scratch reg (%d).\n", r);
		return r;
	}
	WREG32(scratch, 0xCAFEDEAD);
	r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
	if (r) {
		DRM_ERROR("radeon: failed to get ib (%d).\n", r);
		goto free_scratch;
	}
	ib.ptr[0] = PACKET3(PACKET3_SET_CONFIG_REG, 1);
	ib.ptr[1] = ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2);
	ib.ptr[2] = 0xDEADBEEF;
	ib.length_dw = 3;
	r = radeon_ib_schedule(rdev, &ib, NULL);
	if (r) {
		DRM_ERROR("radeon: failed to schedule ib (%d).\n", r);
		goto free_ib;
	}
	r = radeon_fence_wait(ib.fence, false);
	if (r) {
		DRM_ERROR("radeon: fence wait failed (%d).\n", r);
		goto free_ib;
	}
	for (i = 0; i < rdev->usec_timeout; i++) {
		tmp = RREG32(scratch);
		if (tmp == 0xDEADBEEF)
			break;
		DRM_UDELAY(1);
	}
	if (i < rdev->usec_timeout) {
		DRM_INFO("ib test on ring %d succeeded in %u usecs\n", ib.fence->ring, i);
	} else {
		DRM_ERROR("radeon: ib test failed (scratch(0x%04X)=0x%08X)\n",
			  scratch, tmp);
		r = -EINVAL;
	}
free_ib:
	radeon_ib_free(rdev, &ib);
free_scratch:
	radeon_scratch_free(rdev, scratch);
	return r;
}
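/*
 * Interrupts
 *
 * Interrupts use a ring buffer on r6xx/r7xx hardware.  It works pretty
 * much like the CP ring buffer, but in reverse.  Rather than the CPU
 * writing to the ring and the GPU consuming, the GPU writes to the ring
 * and the host consumes.  As the host irq handler processes interrupts,
 * it increments the rptr.  When the rptr catches up with the wptr, all
 * the current interrupts have been processed.
 */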
void r600_ih_ring_init(struct radeon_device *rdev, unsigned ring_size)
{
	u32 rb_bufsz;

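	/* Align ring size */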
	rb_bufsz = drm_order(ring_size / 4);
	ring_size = (1 << rb_bufsz) * 4;
	rdev->ih.ring_size = ring_size;
	rdev->ih.ptr_mask = rdev->ih.ring_size - 1;
	rdev->ih.rptr = 0;
}

int r600_ih_ring_alloc(struct radeon_device *rdev)
{
	int r;

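	/* Allocate ring buffer */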
	if (rdev->ih.ring_obj == NULL) {
		r = radeon_bo_create(rdev, rdev->ih.ring_size,
				     PAGE_SIZE, true,
				     RADEON_GEM_DOMAIN_GTT,
				     NULL, &rdev->ih.ring_obj);
		if (r) {
			DRM_ERROR("radeon: failed to create ih ring buffer (%d).\n", r);
			return r;
		}
		r = radeon_bo_reserve(rdev->ih.ring_obj, false);
		if (unlikely(r != 0))
			return r;
		r = radeon_bo_pin(rdev->ih.ring_obj,
				  RADEON_GEM_DOMAIN_GTT,
				  &rdev->ih.gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->ih.ring_obj);
			DRM_ERROR("radeon: failed to pin ih ring buffer (%d).\n", r);
			return r;
		}
		r = radeon_bo_kmap(rdev->ih.ring_obj,
				   (void **)&rdev->ih.ring);
		radeon_bo_unreserve(rdev->ih.ring_obj);
		if (r) {
			DRM_ERROR("radeon: failed to map ih ring buffer (%d).\n", r);
			return r;
		}
	}
	return 0;
}

void r600_ih_ring_fini(struct radeon_device *rdev)
{
	int r;
	if (rdev->ih.ring_obj) {
		r = radeon_bo_reserve(rdev->ih.ring_obj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->ih.ring_obj);
			radeon_bo_unpin(rdev->ih.ring_obj);
			radeon_bo_unreserve(rdev->ih.ring_obj);
		}
		radeon_bo_unref(&rdev->ih.ring_obj);
		rdev->ih.ring = NULL;
		rdev->ih.ring_obj = NULL;
	}
}

void r600_rlc_stop(struct radeon_device *rdev)
{
	if ((rdev->family >= CHIP_RV770) &&
	    (rdev->family <= CHIP_RV740)) {
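		/* r7xx asics need to soft reset RLC before halting */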
		WREG32(SRBM_SOFT_RESET, SOFT_RESET_RLC);
		RREG32(SRBM_SOFT_RESET);
		mdelay(15);
		WREG32(SRBM_SOFT_RESET, 0);
		RREG32(SRBM_SOFT_RESET);
	}

	WREG32(RLC_CNTL, 0);
}

static void r600_rlc_start(struct radeon_device *rdev)
{
	WREG32(RLC_CNTL, RLC_ENABLE);
}

static int r600_rlc_init(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->family == CHIP_ARUBA) {
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	}
	if (rdev->family <= CHIP_CAYMAN) {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
	}
	if (rdev->family <= CHIP_CAICOS) {
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CEDAR) {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_RV770) {
		for (i = 0; i < R700_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	WREG32(RLC_UCODE_ADDR, 0);

	r600_rlc_start(rdev);

	return 0;
}

static void r600_enable_interrupts(struct radeon_device *rdev)
{
	u32 ih_cntl = RREG32(IH_CNTL);
	u32 ih_rb_cntl = RREG32(IH_RB_CNTL);

	ih_cntl |= ENABLE_INTR;
	ih_rb_cntl |= IH_RB_ENABLE;
	WREG32(IH_CNTL, ih_cntl);
	WREG32(IH_RB_CNTL, ih_rb_cntl);
	rdev->ih.enabled = true;
}

void r600_disable_interrupts(struct radeon_device *rdev)
{
	u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
	u32 ih_cntl = RREG32(IH_CNTL);

	ih_rb_cntl &= ~IH_RB_ENABLE;
	ih_cntl &= ~ENABLE_INTR;
	WREG32(IH_RB_CNTL, ih_rb_cntl);
	WREG32(IH_CNTL, ih_cntl);

	WREG32(IH_RB_RPTR, 0);
	WREG32(IH_RB_WPTR, 0);
	rdev->ih.enabled = false;
	rdev->ih.rptr = 0;
}

static void r600_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(DxMODE_INT_MASK, 0);
	WREG32(D1GRPH_INTERRUPT_CONTROL, 0);
	WREG32(D2GRPH_INTERRUPT_CONTROL, 0);
	if (ASIC_IS_DCE3(rdev)) {
		WREG32(DCE3_DACA_AUTODETECT_INT_CONTROL, 0);
		WREG32(DCE3_DACB_AUTODETECT_INT_CONTROL, 0);
		tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		if (ASIC_IS_DCE32(rdev)) {
			tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD5_INT_CONTROL, tmp);
			tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
			WREG32(DC_HPD6_INT_CONTROL, tmp);
			tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, tmp);
			tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, tmp);
		} else {
			tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
			WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
			tmp = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
			WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, tmp);
		}
	} else {
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
		WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
		tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
		WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
		tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
		WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
		tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & DC_HOT_PLUG_DETECTx_INT_POLARITY;
		WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
		tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
		WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
		tmp = RREG32(HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
		WREG32(HDMI1_AUDIO_PACKET_CONTROL, tmp);
	}
}

int r600_irq_init(struct radeon_device *rdev)
{
	int ret = 0;
	int rb_bufsz;
	u32 interrupt_cntl, ih_cntl, ih_rb_cntl;

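	/* allocate ring */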
	ret = r600_ih_ring_alloc(rdev);
	if (ret)
		return ret;

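	/* disable irqs */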
	r600_disable_interrupts(rdev);

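	/* init rlc */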
	ret = r600_rlc_init(rdev);
	if (ret) {
		r600_ih_ring_fini(rdev);
		return ret;
	}

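	/* setup interrupt control */
	/* set dummy read address to ring address */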
	WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8);
	interrupt_cntl = RREG32(INTERRUPT_CNTL);
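	/* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
	 * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
	 */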
	interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
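	/* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */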
	interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
	WREG32(INTERRUPT_CNTL, interrupt_cntl);

	WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
	rb_bufsz = drm_order(rdev->ih.ring_size / 4);

	ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
		      IH_WPTR_OVERFLOW_CLEAR |
		      (rb_bufsz << 1));

	if (rdev->wb.enabled)
		ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;

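	/* set the writeback address whether it's enabled or not */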
	WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
	WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);

	WREG32(IH_RB_CNTL, ih_rb_cntl);

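	/* set rptr, wptr to 0 */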
	WREG32(IH_RB_RPTR, 0);
	WREG32(IH_RB_WPTR, 0);

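	/* Default settings for IH_CNTL (disabled at first) */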
	ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10);
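	/* RPTR_REARM only works if msi's are enabled */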
	if (rdev->msi_enabled)
		ih_cntl |= RPTR_REARM;
	WREG32(IH_CNTL, ih_cntl);

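	/* force the active interrupt state to all disabled */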
	if (rdev->family >= CHIP_CEDAR)
		evergreen_disable_interrupt_state(rdev);
	else
		r600_disable_interrupt_state(rdev);

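	/* at this point everything should be setup correctly to enable master */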
	pci_set_master(rdev->pdev);

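	/* enable irqs */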
	r600_enable_interrupts(rdev);

	return ret;
}

void r600_irq_suspend(struct radeon_device *rdev)
{
	r600_irq_disable(rdev);
	r600_rlc_stop(rdev);
}

void r600_irq_fini(struct radeon_device *rdev)
{
	r600_irq_suspend(rdev);
	r600_ih_ring_fini(rdev);
}

int r600_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 mode_int = 0;
	u32 hpd1, hpd2, hpd3, hpd4 = 0, hpd5 = 0, hpd6 = 0;
	u32 grbm_int_cntl = 0;
	u32 hdmi0, hdmi1;
	u32 d1grph = 0, d2grph = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}

	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
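		/* force the active interrupt state to all disabled */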
		r600_disable_interrupt_state(rdev);
		return 0;
	}

	if (ASIC_IS_DCE3(rdev)) {
		hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
		if (ASIC_IS_DCE32(rdev)) {
			hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
			hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
			hdmi0 = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
			hdmi1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
		} else {
			hdmi0 = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
			hdmi1 = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
		}
	} else {
		hpd1 = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd2 = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd3 = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hdmi0 = RREG32(HDMI0_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
		hdmi1 = RREG32(HDMI1_AUDIO_PACKET_CONTROL) & ~HDMI0_AZ_FORMAT_WTRIG_MASK;
	}

	if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int\n");
		cp_int_cntl |= RB_INT_ENABLE;
		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
	}
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("r600_irq_set: vblank 0\n");
		mode_int |= D1MODE_VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("r600_irq_set: vblank 1\n");
		mode_int |= D2MODE_VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("r600_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("r600_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("r600_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("r600_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("r600_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("r600_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("r600_irq_set: hdmi 0\n");
		hdmi0 |= HDMI0_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("r600_irq_set: hdmi 1\n");
		hdmi1 |= HDMI0_AZ_FORMAT_WTRIG_MASK;
	}

	WREG32(CP_INT_CNTL, cp_int_cntl);
	WREG32(DxMODE_INT_MASK, mode_int);
	WREG32(D1GRPH_INTERRUPT_CONTROL, d1grph);
	WREG32(D2GRPH_INTERRUPT_CONTROL, d2grph);
	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
	if (ASIC_IS_DCE3(rdev)) {
		WREG32(DC_HPD1_INT_CONTROL, hpd1);
		WREG32(DC_HPD2_INT_CONTROL, hpd2);
		WREG32(DC_HPD3_INT_CONTROL, hpd3);
		WREG32(DC_HPD4_INT_CONTROL, hpd4);
		if (ASIC_IS_DCE32(rdev)) {
			WREG32(DC_HPD5_INT_CONTROL, hpd5);
			WREG32(DC_HPD6_INT_CONTROL, hpd6);
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, hdmi0);
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, hdmi1);
		} else {
			WREG32(HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
			WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, hdmi1);
		}
	} else {
		WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, hpd1);
		WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, hpd2);
		WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, hpd3);
		WREG32(HDMI0_AUDIO_PACKET_CONTROL, hdmi0);
		WREG32(HDMI1_AUDIO_PACKET_CONTROL, hdmi1);
	}

	return 0;
}

static void r600_irq_ack(struct radeon_device *rdev)
{
	u32 tmp;

	if (ASIC_IS_DCE3(rdev)) {
		rdev->irq.stat_regs.r600.disp_int = RREG32(DCE3_DISP_INTERRUPT_STATUS);
		rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE);
		rdev->irq.stat_regs.r600.disp_int_cont2 = RREG32(DCE3_DISP_INTERRUPT_STATUS_CONTINUE2);
		if (ASIC_IS_DCE32(rdev)) {
			rdev->irq.stat_regs.r600.hdmi0_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET0);
			rdev->irq.stat_regs.r600.hdmi1_status = RREG32(AFMT_STATUS + DCE3_HDMI_OFFSET1);
		} else {
			rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
			rdev->irq.stat_regs.r600.hdmi1_status = RREG32(DCE3_HDMI1_STATUS);
		}
	} else {
		rdev->irq.stat_regs.r600.disp_int = RREG32(DISP_INTERRUPT_STATUS);
		rdev->irq.stat_regs.r600.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
		rdev->irq.stat_regs.r600.disp_int_cont2 = 0;
		rdev->irq.stat_regs.r600.hdmi0_status = RREG32(HDMI0_STATUS);
		rdev->irq.stat_regs.r600.hdmi1_status = RREG32(HDMI1_STATUS);
	}
	rdev->irq.stat_regs.r600.d1grph_int = RREG32(D1GRPH_INTERRUPT_STATUS);
	rdev->irq.stat_regs.r600.d2grph_int = RREG32(D2GRPH_INTERRUPT_STATUS);

	if (rdev->irq.stat_regs.r600.d1grph_int & DxGRPH_PFLIP_INT_OCCURRED)
		WREG32(D1GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.r600.d2grph_int & DxGRPH_PFLIP_INT_OCCURRED)
		WREG32(D2GRPH_INTERRUPT_STATUS, DxGRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(D1MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(D1MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT)
		WREG32(D2MODE_VBLANK_STATUS, DxMODE_VBLANK_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT)
		WREG32(D2MODE_VLINE_STATUS, DxMODE_VLINE_ACK);
	if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD1_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD1_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT1_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD2_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD2_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT2_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) {
		if (ASIC_IS_DCE3(rdev)) {
			tmp = RREG32(DC_HPD3_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD3_INT_CONTROL, tmp);
		} else {
			tmp = RREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HOT_PLUG_DETECT3_INT_CONTROL, tmp);
		}
	}
	if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (ASIC_IS_DCE32(rdev)) {
		if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) {
			tmp = RREG32(DC_HPD5_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD5_INT_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) {
			tmp = RREG32(DC_HPD6_INT_CONTROL);
			tmp |= DC_HPDx_INT_ACK;
			WREG32(DC_HPD6_INT_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi0_status & AFMT_AZ_FORMAT_WTRIG) {
			tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0);
			tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET0, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi1_status & AFMT_AZ_FORMAT_WTRIG) {
			tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1);
			tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
			WREG32(AFMT_AUDIO_PACKET_CONTROL + DCE3_HDMI_OFFSET1, tmp);
		}
	} else {
		if (rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG) {
			tmp = RREG32(HDMI0_AUDIO_PACKET_CONTROL);
			tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
			WREG32(HDMI0_AUDIO_PACKET_CONTROL, tmp);
		}
		if (rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG) {
			if (ASIC_IS_DCE3(rdev)) {
				tmp = RREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL);
				tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
				WREG32(DCE3_HDMI1_AUDIO_PACKET_CONTROL, tmp);
			} else {
				tmp = RREG32(HDMI1_AUDIO_PACKET_CONTROL);
				tmp |= HDMI0_AZ_FORMAT_WTRIG_ACK;
				WREG32(HDMI1_AUDIO_PACKET_CONTROL, tmp);
			}
		}
	}
}

void r600_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);

	mdelay(1);
	r600_irq_ack(rdev);
	r600_disable_interrupt_state(rdev);
}

static u32 r600_get_ih_wptr(struct radeon_device *rdev)
{
	u32 wptr, tmp;

	if (rdev->wb.enabled)
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
	else
		wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
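		/* When a ring buffer overflow happens, start parsing interrupts
		 * from the last not overwritten vector (wptr + 16).
		 * Hopefully this should allow us to catch up.
		 */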
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
	}
	return (wptr & rdev->ih.ptr_mask);
}
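/*        r600 IH ring
 * Each IH ring entry is 128 bits:
 * [7:0]    - interrupt source id
 * [31:8]   - reserved
 * [59:32]  - interrupt source data
 * [127:60] - reserved
 *
 * The basic interrupt vector entries as decoded by the
 * handler below:
 * src_id  src_data  description
 *      1         0  D1 Vblank
 *      1         1  D1 Vline
 *      5         0  D2 Vblank
 *      5         1  D2 Vline
 *     19     0/1/4/5/10/12  HPD1-6 hotplug
 *     21         4  HDMI block A
 *     21         5  HDMI block B
 *    176         -  CP_INT RB
 *    177         -  CP_INT IB1
 *    178         -  CP_INT IB2
 *    181         -  EOP Interrupt
 *    233         -  GUI Idle
 *
 * Note, these are based on r600 and may need to be
 * adjusted or added to on newer asics
 */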
int r600_irq_process(struct radeon_device *rdev)
{
	u32 wptr;
	u32 rptr;
	u32 src_id, src_data;
	u32 ring_index;
	bool queue_hotplug = false;
	bool queue_hdmi = false;

	if (!rdev->ih.enabled || rdev->shutdown)
		return IRQ_NONE;

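	/* No MSIs, need a dummy read to flush PCI DMAs */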
	if (!rdev->msi_enabled)
		RREG32(IH_RB_WPTR);

	wptr = r600_get_ih_wptr(rdev);

restart_ih:
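	/* is somebody else already processing irqs? */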
	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;

	rptr = rdev->ih.rptr;
	DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);

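	/* Order reading of wptr vs. reading of IH ring data */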
	rmb();

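	/* display interrupts */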
	r600_irq_ack(rdev);

	while (rptr != wptr) {
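		/* wptr/rptr are in bytes! */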
		ring_index = rptr / 4;
		src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;

		switch (src_id) {
		case 1:
			switch (src_data) {
			case 0:
				if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[0]) {
						drm_handle_vblank(rdev->ddev, 0);
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
					}
					if (atomic_read(&rdev->irq.pflip[0]))
						radeon_crtc_handle_flip(rdev, 0);
					rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D1 vblank\n");
				}
				break;
			case 1:
				if (rdev->irq.stat_regs.r600.disp_int & LB_D1_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int &= ~LB_D1_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D1 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 5:
			switch (src_data) {
			case 0:
				if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[1]) {
						drm_handle_vblank(rdev->ddev, 1);
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
					}
					if (atomic_read(&rdev->irq.pflip[1]))
						radeon_crtc_handle_flip(rdev, 1);
					rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D2 vblank\n");
				}
				break;
			case 1:
				if (rdev->irq.stat_regs.r600.disp_int & LB_D2_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int &= ~LB_D2_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D2 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 19:
			switch (src_data) {
			case 0:
				if (rdev->irq.stat_regs.r600.disp_int & DC_HPD1_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD1_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD1\n");
				}
				break;
			case 1:
				if (rdev->irq.stat_regs.r600.disp_int & DC_HPD2_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int &= ~DC_HPD2_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD2\n");
				}
				break;
			case 4:
				if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD3_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD3_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD3\n");
				}
				break;
			case 5:
				if (rdev->irq.stat_regs.r600.disp_int_cont & DC_HPD4_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int_cont &= ~DC_HPD4_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD4\n");
				}
				break;
			case 10:
				if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD5_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD5_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD5\n");
				}
				break;
			case 12:
				if (rdev->irq.stat_regs.r600.disp_int_cont2 & DC_HPD6_INTERRUPT) {
					rdev->irq.stat_regs.r600.disp_int_cont2 &= ~DC_HPD6_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD6\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 21:
			switch (src_data) {
			case 4:
				if (rdev->irq.stat_regs.r600.hdmi0_status & HDMI0_AZ_FORMAT_WTRIG) {
					rdev->irq.stat_regs.r600.hdmi0_status &= ~HDMI0_AZ_FORMAT_WTRIG;
					queue_hdmi = true;
					DRM_DEBUG("IH: HDMI0\n");
				}
				break;
			case 5:
				if (rdev->irq.stat_regs.r600.hdmi1_status & HDMI0_AZ_FORMAT_WTRIG) {
					rdev->irq.stat_regs.r600.hdmi1_status &= ~HDMI0_AZ_FORMAT_WTRIG;
					queue_hdmi = true;
					DRM_DEBUG("IH: HDMI1\n");
				}
				break;
			default:
				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 176:
		case 177:
		case 178:
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 181:
			DRM_DEBUG("IH: CP EOP\n");
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 233:
			DRM_DEBUG("IH: GUI idle\n");
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}

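		/* wptr/rptr are in bytes! */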
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
	}
	if (queue_hotplug)
		schedule_work(&rdev->hotplug_work);
	if (queue_hdmi)
		schedule_work(&rdev->audio_work);
	rdev->ih.rptr = rptr;
	WREG32(IH_RB_RPTR, rdev->ih.rptr);
	atomic_set(&rdev->ih.lock, 0);

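	/* make sure wptr hasn't changed while processing */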
	wptr = r600_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}

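/*
 * Debugfs info
 */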
#if defined(CONFIG_DEBUG_FS)

static int r600_debugfs_mc_info(struct seq_file *m, void *data)
{
	struct drm_info_node *node = (struct drm_info_node *) m->private;
	struct drm_device *dev = node->minor->dev;
	struct radeon_device *rdev = dev->dev_private;

	DREG32_SYS(m, rdev, R_000E50_SRBM_STATUS);
	DREG32_SYS(m, rdev, VM_L2_STATUS);
	return 0;
}

static struct drm_info_list r600_mc_info_list[] = {
	{"r600_mc_info", r600_debugfs_mc_info, 0, NULL},
};
#endif

int r600_debugfs_mc_info_init(struct radeon_device *rdev)
{
#if defined(CONFIG_DEBUG_FS)
	return radeon_debugfs_add_files(rdev, r600_mc_info_list, ARRAY_SIZE(r600_mc_info_list));
#else
	return 0;
#endif
}
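/**
 * r600_ioctl_wait_idle - flush host path cache on wait idle ioctl
 * @rdev: radeon device structure
 * @bo: buffer object struct which userspace is waiting for idle
 *
 * Some R6XX/R7XX don't seem to take into account HDP flushes performed
 * through the ring buffer, which leads to corruption in rendering; see
 * http://bugzilla.kernel.org/show_bug.cgi?id=15186.  To avoid this we
 * perform the HDP flush directly by writing the register through MMIO.
 */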
void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo)
{
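	/* r7xx hw bug.  write to HDP_DEBUG1 followed by fb read
	 * rather than write to HDP_REG_COHERENCY_FLUSH_CNTL
	 */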
	if ((rdev->family >= CHIP_RV770) && (rdev->family <= CHIP_RV740) &&
	    rdev->vram_scratch.ptr && !(rdev->flags & RADEON_IS_AGP)) {
		void __iomem *ptr = (void *)rdev->vram_scratch.ptr;
		u32 tmp;

		WREG32(HDP_DEBUG1, 0);
		tmp = readl((void __iomem *)ptr);
	} else
		WREG32(R_005480_HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
}

void r600_set_pcie_lanes(struct radeon_device *rdev, int lanes)
{
	u32 link_width_cntl, mask, target_reg;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

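	/* x2 cards have a special sequence */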
	if (ASIC_IS_X2(rdev))
		return;

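	/* FIXME wait for idle */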
	switch (lanes) {
	case 0:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X0;
		break;
	case 1:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X1;
		break;
	case 2:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X2;
		break;
	case 4:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X4;
		break;
	case 8:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X8;
		break;
	case 12:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X12;
		break;
	case 16:
	default:
		mask = RADEON_PCIE_LC_LINK_WIDTH_X16;
		break;
	}

	link_width_cntl = RREG32_PCIE_P(RADEON_PCIE_LC_LINK_WIDTH_CNTL);

	if ((link_width_cntl & RADEON_PCIE_LC_LINK_WIDTH_RD_MASK) ==
	    (mask << RADEON_PCIE_LC_LINK_WIDTH_RD_SHIFT))
		return;

	if (link_width_cntl & R600_PCIE_LC_UPCONFIGURE_DIS)
		return;

	link_width_cntl &= ~(RADEON_PCIE_LC_LINK_WIDTH_MASK |
			     RADEON_PCIE_LC_RECONFIG_NOW |
			     R600_PCIE_LC_RENEGOTIATE_EN |
			     R600_PCIE_LC_RECONFIG_ARC_MISSING_ESCAPE);
	link_width_cntl |= mask;

	WREG32_PCIE_P(RADEON_PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

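	/* some northbridges can renegotiate the link rather than requiring
	 * a complete re-config.
	 */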
	if (link_width_cntl & R600_PCIE_LC_RENEGOTIATION_SUPPORT)
		link_width_cntl |= R600_PCIE_LC_RENEGOTIATE_EN | R600_PCIE_LC_UPCONFIGURE_SUPPORT;
	else
		link_width_cntl |= R600_PCIE_LC_RECONFIG_ARC_MISSING_ESCAPE;

	WREG32_PCIE_P(RADEON_PCIE_LC_LINK_WIDTH_CNTL, (link_width_cntl |
						       RADEON_PCIE_LC_RECONFIG_NOW));

	if (rdev->family >= CHIP_RV770)
		target_reg = R700_TARGET_AND_CURRENT_PROFILE_INDEX;
	else
		target_reg = R600_TARGET_AND_CURRENT_PROFILE_INDEX;

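	/* wait for lane set to complete */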
	link_width_cntl = RREG32(target_reg);
	while (link_width_cntl == 0xffffffff)
		link_width_cntl = RREG32(target_reg);
}

int r600_get_pcie_lanes(struct radeon_device *rdev)
{
	u32 link_width_cntl;

	if (rdev->flags & RADEON_IS_IGP)
		return 0;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return 0;

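	/* x2 cards have a special sequence */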
	if (ASIC_IS_X2(rdev))
		return 0;

	link_width_cntl = RREG32_PCIE_P(RADEON_PCIE_LC_LINK_WIDTH_CNTL);

	switch ((link_width_cntl & RADEON_PCIE_LC_LINK_WIDTH_RD_MASK) >> RADEON_PCIE_LC_LINK_WIDTH_RD_SHIFT) {
	case RADEON_PCIE_LC_LINK_WIDTH_X0:
		return 0;
	case RADEON_PCIE_LC_LINK_WIDTH_X1:
		return 1;
	case RADEON_PCIE_LC_LINK_WIDTH_X2:
		return 2;
	case RADEON_PCIE_LC_LINK_WIDTH_X4:
		return 4;
	case RADEON_PCIE_LC_LINK_WIDTH_X8:
		return 8;
	case RADEON_PCIE_LC_LINK_WIDTH_X16:
	default:
		return 16;
	}
}

static void r600_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, lanes, speed_cntl, training_cntl, tmp;
	u16 link_cntl2;
	u32 mask;
	int ret;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

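	/* x2 cards have a special sequence */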
	if (ASIC_IS_X2(rdev))
		return;

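	/* only RV6xx+ chips are supported */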
3695
3696 if (rdev->family <= CHIP_R600)
3697 return;
3698
3699 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
3700 if (ret != 0)
3701 return;
3702
3703 if (!(mask & DRM_PCIE_SPEED_50))
3704 return;
3705
3706 speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3707 if (speed_cntl & LC_CURRENT_DATA_RATE) {
3708 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
3709 return;
3710 }
3711
3712 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
3713
3714
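	/* 55 nm r6xx asics */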
	if ((rdev->family == CHIP_RV670) ||
	    (rdev->family == CHIP_RV620) ||
	    (rdev->family == CHIP_RV635)) {
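		/* advertise upconfig capability */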
		link_width_cntl = RREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		link_width_cntl = RREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL);
		if (link_width_cntl & LC_RENEGOTIATION_SUPPORT) {
			lanes = (link_width_cntl & LC_LINK_WIDTH_RD_MASK) >> LC_LINK_WIDTH_RD_SHIFT;
			link_width_cntl &= ~(LC_LINK_WIDTH_MASK |
					     LC_RECONFIG_ARC_MISSING_ESCAPE);
			link_width_cntl |= lanes | LC_RECONFIG_NOW | LC_RENEGOTIATE_EN;
			WREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		} else {
			link_width_cntl |= LC_UPCONFIGURE_DIS;
			WREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
		}
	}

	speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

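		/* 55 nm r6xx asics */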
		if ((rdev->family == CHIP_RV670) ||
		    (rdev->family == CHIP_RV620) ||
		    (rdev->family == CHIP_RV635)) {
			WREG32(MM_CFGREGS_CNTL, 0x8);
			link_cntl2 = RREG32(0x4088);
			WREG32(MM_CFGREGS_CNTL, 0);
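			/* not supported yet */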
			if (link_cntl2 & SELECTABLE_DEEMPHASIS)
				return;
		}

		speed_cntl &= ~LC_SPEED_CHANGE_ATTEMPTS_ALLOWED_MASK;
		speed_cntl |= (0x3 << LC_SPEED_CHANGE_ATTEMPTS_ALLOWED_SHIFT);
		speed_cntl &= ~LC_VOLTAGE_TIMER_SEL_MASK;
		speed_cntl &= ~LC_FORCE_DIS_HW_SPEED_CHANGE;
		speed_cntl |= LC_FORCE_EN_HW_SPEED_CHANGE;
		WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);

		tmp = RREG32(0x541c);
		WREG32(0x541c, tmp | 0x8);
		WREG32(MM_CFGREGS_CNTL, MM_WR_TO_CFG_EN);
		link_cntl2 = RREG16(0x4088);
		link_cntl2 &= ~TARGET_LINK_SPEED_MASK;
		link_cntl2 |= 0x2;
		WREG16(0x4088, link_cntl2);
		WREG32(MM_CFGREGS_CNTL, 0);

		if ((rdev->family == CHIP_RV670) ||
		    (rdev->family == CHIP_RV620) ||
		    (rdev->family == CHIP_RV635)) {
			training_cntl = RREG32_PCIE_P(PCIE_LC_TRAINING_CNTL);
			training_cntl &= ~LC_POINT_7_PLUS_EN;
			WREG32_PCIE_P(PCIE_LC_TRAINING_CNTL, training_cntl);
		} else {
			speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
			speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
			WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);
		}

		speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL);
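		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */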
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
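/**
 * r600_get_gpu_clock - return GPU clock counter snapshot
 *
 * @rdev: radeon_device pointer
 *
 * Fetches a GPU clock counter snapshot.
 * Returns the 64 bit clock counter snapshot.
 */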
uint64_t r600_get_gpu_clock(struct radeon_device *rdev)
{
	uint64_t clock;

	mutex_lock(&rdev->gpu_clock_mutex);
	WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
	clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
		((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
	mutex_unlock(&rdev->gpu_clock_mutex);
	return clock;
}