/* linux/drivers/gpu/drm/radeon/evergreen.c */
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
  24#include <linux/firmware.h>
  25#include <linux/slab.h>
  26#include <drm/drmP.h>
  27#include "radeon.h"
  28#include "radeon_asic.h"
  29#include "radeon_audio.h"
  30#include <drm/radeon_drm.h>
  31#include "evergreend.h"
  32#include "atom.h"
  33#include "avivod.h"
  34#include "evergreen_reg.h"
  35#include "evergreen_blit_shaders.h"
  36#include "radeon_ucode.h"
  37
  38#define DC_HPDx_CONTROL(x)        (DC_HPD1_CONTROL     + (x * 0xc))
  39#define DC_HPDx_INT_CONTROL(x)    (DC_HPD1_INT_CONTROL + (x * 0xc))
  40#define DC_HPDx_INT_STATUS_REG(x) (DC_HPD1_INT_STATUS  + (x * 0xc))
  41
  42/*
  43 * Indirect registers accessor
  44 */
  45u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
  46{
  47        unsigned long flags;
  48        u32 r;
  49
  50        spin_lock_irqsave(&rdev->cg_idx_lock, flags);
  51        WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
  52        r = RREG32(EVERGREEN_CG_IND_DATA);
  53        spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
  54        return r;
  55}
  56
  57void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
  58{
  59        unsigned long flags;
  60
  61        spin_lock_irqsave(&rdev->cg_idx_lock, flags);
  62        WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
  63        WREG32(EVERGREEN_CG_IND_DATA, (v));
  64        spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
  65}
  66
  67u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
  68{
  69        unsigned long flags;
  70        u32 r;
  71
  72        spin_lock_irqsave(&rdev->pif_idx_lock, flags);
  73        WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
  74        r = RREG32(EVERGREEN_PIF_PHY0_DATA);
  75        spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
  76        return r;
  77}
  78
  79void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
  80{
  81        unsigned long flags;
  82
  83        spin_lock_irqsave(&rdev->pif_idx_lock, flags);
  84        WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
  85        WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
  86        spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
  87}
  88
  89u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
  90{
  91        unsigned long flags;
  92        u32 r;
  93
  94        spin_lock_irqsave(&rdev->pif_idx_lock, flags);
  95        WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
  96        r = RREG32(EVERGREEN_PIF_PHY1_DATA);
  97        spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
  98        return r;
  99}
 100
 101void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
 102{
 103        unsigned long flags;
 104
 105        spin_lock_irqsave(&rdev->pif_idx_lock, flags);
 106        WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
 107        WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
 108        spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
 109}
 110
/*
 * MMIO offsets of the six per-CRTC register blocks; index by CRTC id to
 * address display controller registers for that CRTC.
 */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
 120
 121#include "clearstate_evergreen.h"
 122
/*
 * Register offsets the RLC saves/restores on Sumo-family parts.
 * NOTE(review): raw hardware offsets supplied by AMD; the list is consumed
 * by RLC setup code elsewhere in the driver — do not reorder or edit by hand.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
 207
/* Forward declarations for this file and routines shared with the other
 * radeon ASIC files (cayman/si/cik/rv770), which are defined elsewhere. */
static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

/* Clear-state buffer (CSB) helpers borrowed from the SI/CIK code paths. */
extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
 223
/*
 * "Golden" register settings for Evergreen.  Entries appear to be
 * {offset, mask, value} triplets applied at init by the register-sequence
 * programmer defined elsewhere in the driver — TODO confirm against the
 * caller.  Values come from AMD; do not tune by hand.
 */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
 269
/*
 * Second bank of Evergreen golden registers; same {offset, mask, value}
 * triplet layout as evergreen_golden_registers above (all cleared to 0).
 */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
 287
/*
 * Medium-grain clock-gating (MGCG) init sequence for Cypress.
 * {offset, mask, value} triplets; the repeated writes to 0x802c with
 * different values suggest it selects a bank before the following group of
 * writes — NOTE(review): sequence order is significant, do not reorder.
 */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
 440
/*
 * MGCG init sequence for Redwood; same {offset, mask, value} triplet
 * layout as cypress_mgcg_init but with a shorter 0x91xx group (Redwood
 * has fewer shader engines than Cypress).  Order is significant.
 */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
 512
/*
 * Golden register settings for Cedar; {offset, mask, value} triplets.
 * Mostly mirrors evergreen_golden_registers with Cedar-specific values
 * (e.g. 0x88d4, 0x8cf0) and fewer pipe entries.
 */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
 555
/*
 * MGCG init sequence for Cedar; {offset, mask, value} triplets with a
 * single short 0x91xx group (smallest Evergreen part).  Order is
 * significant.
 */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
 609
/*
 * MGCG init sequence for Juniper; {offset, mask, value} triplets.  Note
 * this table trails with extra 0x977c/0x3f80/0xa21x writes after the 0x91xx
 * group, unlike the Cypress/Redwood ordering.  Order is significant.
 */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
 707
/*
 * Golden register settings for SuperSumo (Trinity-class fusion APU);
 * {offset, mask, value} triplets from AMD.
 */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
 782
/*
 * Additional golden registers applied on Sumo on top of the SuperSumo
 * set; {offset, mask, value} triplets.
 */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
 791
/*
 * Golden register settings for Wrestler (Ontario/Zacate fusion APU);
 * {offset, mask, value} triplets from AMD.
 */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
 844
/* "Golden" register overrides for Barts (Northern Islands dGPU), applied at
 * init by evergreen_init_golden_registers().  Laid out as 3-u32 entries;
 * presumably {offset, and-mask, or-value} triples consumed by
 * radeon_program_register_sequence() -- confirm against that helper.
 */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
 893
/* "Golden" register overrides for Turks (Northern Islands dGPU), applied at
 * init by evergreen_init_golden_registers().  Laid out as 3-u32 entries;
 * presumably {offset, and-mask, or-value} triples consumed by
 * radeon_program_register_sequence() -- confirm against that helper.
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
 944
/* "Golden" register overrides for Caicos (Northern Islands dGPU), applied at
 * init by evergreen_init_golden_registers().  Laid out as 3-u32 entries;
 * presumably {offset, and-mask, or-value} triples consumed by
 * radeon_program_register_sequence() -- confirm against that helper.
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
 995
 996static void evergreen_init_golden_registers(struct radeon_device *rdev)
 997{
 998        switch (rdev->family) {
 999        case CHIP_CYPRESS:
1000        case CHIP_HEMLOCK:
1001                radeon_program_register_sequence(rdev,
1002                                                 evergreen_golden_registers,
1003                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1004                radeon_program_register_sequence(rdev,
1005                                                 evergreen_golden_registers2,
1006                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1007                radeon_program_register_sequence(rdev,
1008                                                 cypress_mgcg_init,
1009                                                 (const u32)ARRAY_SIZE(cypress_mgcg_init));
1010                break;
1011        case CHIP_JUNIPER:
1012                radeon_program_register_sequence(rdev,
1013                                                 evergreen_golden_registers,
1014                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1015                radeon_program_register_sequence(rdev,
1016                                                 evergreen_golden_registers2,
1017                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1018                radeon_program_register_sequence(rdev,
1019                                                 juniper_mgcg_init,
1020                                                 (const u32)ARRAY_SIZE(juniper_mgcg_init));
1021                break;
1022        case CHIP_REDWOOD:
1023                radeon_program_register_sequence(rdev,
1024                                                 evergreen_golden_registers,
1025                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1026                radeon_program_register_sequence(rdev,
1027                                                 evergreen_golden_registers2,
1028                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1029                radeon_program_register_sequence(rdev,
1030                                                 redwood_mgcg_init,
1031                                                 (const u32)ARRAY_SIZE(redwood_mgcg_init));
1032                break;
1033        case CHIP_CEDAR:
1034                radeon_program_register_sequence(rdev,
1035                                                 cedar_golden_registers,
1036                                                 (const u32)ARRAY_SIZE(cedar_golden_registers));
1037                radeon_program_register_sequence(rdev,
1038                                                 evergreen_golden_registers2,
1039                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1040                radeon_program_register_sequence(rdev,
1041                                                 cedar_mgcg_init,
1042                                                 (const u32)ARRAY_SIZE(cedar_mgcg_init));
1043                break;
1044        case CHIP_PALM:
1045                radeon_program_register_sequence(rdev,
1046                                                 wrestler_golden_registers,
1047                                                 (const u32)ARRAY_SIZE(wrestler_golden_registers));
1048                break;
1049        case CHIP_SUMO:
1050                radeon_program_register_sequence(rdev,
1051                                                 supersumo_golden_registers,
1052                                                 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1053                break;
1054        case CHIP_SUMO2:
1055                radeon_program_register_sequence(rdev,
1056                                                 supersumo_golden_registers,
1057                                                 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1058                radeon_program_register_sequence(rdev,
1059                                                 sumo_golden_registers,
1060                                                 (const u32)ARRAY_SIZE(sumo_golden_registers));
1061                break;
1062        case CHIP_BARTS:
1063                radeon_program_register_sequence(rdev,
1064                                                 barts_golden_registers,
1065                                                 (const u32)ARRAY_SIZE(barts_golden_registers));
1066                break;
1067        case CHIP_TURKS:
1068                radeon_program_register_sequence(rdev,
1069                                                 turks_golden_registers,
1070                                                 (const u32)ARRAY_SIZE(turks_golden_registers));
1071                break;
1072        case CHIP_CAICOS:
1073                radeon_program_register_sequence(rdev,
1074                                                 caicos_golden_registers,
1075                                                 (const u32)ARRAY_SIZE(caicos_golden_registers));
1076                break;
1077        default:
1078                break;
1079        }
1080}
1081
1082/**
1083 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1084 *
1085 * @rdev: radeon_device pointer
1086 * @reg: register offset in bytes
1087 * @val: register value
1088 *
1089 * Returns 0 for success or -EINVAL for an invalid register
1090 *
1091 */
1092int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1093                                        u32 reg, u32 *val)
1094{
1095        switch (reg) {
1096        case GRBM_STATUS:
1097        case GRBM_STATUS_SE0:
1098        case GRBM_STATUS_SE1:
1099        case SRBM_STATUS:
1100        case SRBM_STATUS2:
1101        case DMA_STATUS_REG:
1102        case UVD_STATUS:
1103                *val = RREG32(reg);
1104                return 0;
1105        default:
1106                return -EINVAL;
1107        }
1108}
1109
1110void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1111                             unsigned *bankh, unsigned *mtaspect,
1112                             unsigned *tile_split)
1113{
1114        *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1115        *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1116        *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1117        *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1118        switch (*bankw) {
1119        default:
1120        case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1121        case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1122        case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1123        case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1124        }
1125        switch (*bankh) {
1126        default:
1127        case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1128        case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1129        case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1130        case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1131        }
1132        switch (*mtaspect) {
1133        default:
1134        case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1135        case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1136        case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1137        case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1138        }
1139}
1140
1141static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1142                              u32 cntl_reg, u32 status_reg)
1143{
1144        int r, i;
1145        struct atom_clock_dividers dividers;
1146
1147        r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1148                                           clock, false, &dividers);
1149        if (r)
1150                return r;
1151
1152        WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1153
1154        for (i = 0; i < 100; i++) {
1155                if (RREG32(status_reg) & DCLK_STATUS)
1156                        break;
1157                mdelay(10);
1158        }
1159        if (i == 100)
1160                return -ETIMEDOUT;
1161
1162        return 0;
1163}
1164
1165int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1166{
1167        int r = 0;
1168        u32 cg_scratch = RREG32(CG_SCRATCH1);
1169
1170        r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1171        if (r)
1172                goto done;
1173        cg_scratch &= 0xffff0000;
1174        cg_scratch |= vclk / 100; /* Mhz */
1175
1176        r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1177        if (r)
1178                goto done;
1179        cg_scratch &= 0x0000ffff;
1180        cg_scratch |= (dclk / 100) << 16; /* Mhz */
1181
1182done:
1183        WREG32(CG_SCRATCH1, cg_scratch);
1184
1185        return r;
1186}
1187
/**
 * evergreen_set_uvd_clocks - program the UVD PLL (UPLL) for vclk/dclk
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (0 leaves the PLL bypassed and asleep)
 * @dclk: requested UVD decode clock (0 leaves the PLL bypassed and asleep)
 *
 * Full UPLL reprogramming sequence: bypass the clocks, compute dividers,
 * reset and reprogram the PLL, wait for lock, then switch the clock
 * sources back to the PLL outputs.  The ordering and delays below follow
 * the required hardware sequence -- do not reorder.
 *
 * Returns 0 on success, a negative error from divider calculation or the
 * PLL control requests otherwise.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* find a feedback/post divider combination within the VCO limits */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* select the spare bit depending on the feedback divider range
	 * NOTE(review): exact meaning of ISPARE9 is undocumented here --
	 * confirm against the register spec.
	 */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1276
1277void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1278{
1279        int readrq;
1280        u16 v;
1281
1282        readrq = pcie_get_readrq(rdev->pdev);
1283        v = ffs(readrq) - 8;
1284        /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1285         * to avoid hangs or perfomance issues
1286         */
1287        if ((v == 0) || (v == 6) || (v == 7))
1288                pcie_set_readrq(rdev->pdev, 512);
1289}
1290
1291void dce4_program_fmt(struct drm_encoder *encoder)
1292{
1293        struct drm_device *dev = encoder->dev;
1294        struct radeon_device *rdev = dev->dev_private;
1295        struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1296        struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1297        struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1298        int bpc = 0;
1299        u32 tmp = 0;
1300        enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1301
1302        if (connector) {
1303                struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1304                bpc = radeon_get_monitor_bpc(connector);
1305                dither = radeon_connector->dither;
1306        }
1307
1308        /* LVDS/eDP FMT is set up by atom */
1309        if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1310                return;
1311
1312        /* not needed for analog */
1313        if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1314            (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1315                return;
1316
1317        if (bpc == 0)
1318                return;
1319
1320        switch (bpc) {
1321        case 6:
1322                if (dither == RADEON_FMT_DITHER_ENABLE)
1323                        /* XXX sort out optimal dither settings */
1324                        tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1325                                FMT_SPATIAL_DITHER_EN);
1326                else
1327                        tmp |= FMT_TRUNCATE_EN;
1328                break;
1329        case 8:
1330                if (dither == RADEON_FMT_DITHER_ENABLE)
1331                        /* XXX sort out optimal dither settings */
1332                        tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1333                                FMT_RGB_RANDOM_ENABLE |
1334                                FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1335                else
1336                        tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1337                break;
1338        case 10:
1339        default:
1340                /* not needed */
1341                break;
1342        }
1343
1344        WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1345}
1346
1347static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1348{
1349        if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1350                return true;
1351        else
1352                return false;
1353}
1354
1355static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1356{
1357        u32 pos1, pos2;
1358
1359        pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1360        pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1361
1362        if (pos1 != pos2)
1363                return true;
1364        else
1365                return false;
1366}
1367
1368/**
1369 * dce4_wait_for_vblank - vblank wait asic callback.
1370 *
1371 * @rdev: radeon_device pointer
1372 * @crtc: crtc to wait for vblank on
1373 *
1374 * Wait for vblank on the requested crtc (evergreen+).
1375 */
1376void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1377{
1378        unsigned i = 0;
1379
1380        if (crtc >= rdev->num_crtc)
1381                return;
1382
1383        if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1384                return;
1385
1386        /* depending on when we hit vblank, we may be close to active; if so,
1387         * wait for another frame.
1388         */
1389        while (dce4_is_in_vblank(rdev, crtc)) {
1390                if (i++ % 100 == 0) {
1391                        if (!dce4_is_counter_moving(rdev, crtc))
1392                                break;
1393                }
1394        }
1395
1396        while (!dce4_is_in_vblank(rdev, crtc)) {
1397                if (i++ % 100 == 0) {
1398                        if (!dce4_is_counter_moving(rdev, crtc))
1399                                break;
1400                }
1401        }
1402}
1403
1404/**
1405 * evergreen_page_flip - pageflip callback.
1406 *
1407 * @rdev: radeon_device pointer
1408 * @crtc_id: crtc to cleanup pageflip on
1409 * @crtc_base: new address of the crtc (GPU MC address)
1410 *
1411 * Triggers the actual pageflip by updating the primary
1412 * surface base address (evergreen+).
1413 */
1414void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
1415                         bool async)
1416{
1417        struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1418
1419        /* update the scanout addresses */
1420        WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
1421               async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
1422        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1423               upper_32_bits(crtc_base));
1424        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1425               (u32)crtc_base);
1426        /* post the write */
1427        RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
1428}
1429
1430/**
1431 * evergreen_page_flip_pending - check if page flip is still pending
1432 *
1433 * @rdev: radeon_device pointer
1434 * @crtc_id: crtc to check
1435 *
1436 * Returns the current update pending status.
1437 */
1438bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1439{
1440        struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1441
1442        /* Return current update_pending status: */
1443        return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1444                EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1445}
1446
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper carries a per-part temperature offset in
		 * CG_THERMAL_CTRL and the raw ADC reading in CG_TS0_STATUS.
		 */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* NOTE(review): toffset appears to be a 9-bit two's-complement
		 * value -- bit 8 (0x100) set means a negative offset of
		 * (0x200 - toffset); confirm against the register spec.
		 */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* NOTE(review): 0x400/0x200 look like low/high saturation
		 * flags (clamped to -256/255) and 0x100 the sign bit of a
		 * 9-bit reading in half-degree units -- confirm against the
		 * register spec.
		 */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			/* sign-extend the 9-bit field to a negative int */
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* half-degrees C -> millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1485
1486int sumo_get_temp(struct radeon_device *rdev)
1487{
1488        u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1489        int actual_temp = temp - 49;
1490
1491        return actual_temp * 1000;
1492}
1493
1494/**
1495 * sumo_pm_init_profile - Initialize power profiles callback.
1496 *
1497 * @rdev: radeon_device pointer
1498 *
1499 * Initialize the power states used in profile mode
1500 * (sumo, trinity, SI).
1501 * Used for profile mode only.
1502 */
1503void sumo_pm_init_profile(struct radeon_device *rdev)
1504{
1505        int idx;
1506
1507        /* default */
1508        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1509        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1510        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1511        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1512
1513        /* low,mid sh/mh */
1514        if (rdev->flags & RADEON_IS_MOBILITY)
1515                idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1516        else
1517                idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1518
1519        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1520        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1521        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1522        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1523
1524        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1525        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1526        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1527        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1528
1529        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1530        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1531        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1532        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1533
1534        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1535        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1536        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1537        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1538
1539        /* high sh/mh */
1540        idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1541        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1542        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1543        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1544        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1545                rdev->pm.power_state[idx].num_clock_modes - 1;
1546
1547        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1548        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1549        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1550        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1551                rdev->pm.power_state[idx].num_clock_modes - 1;
1552}
1553
1554/**
1555 * btc_pm_init_profile - Initialize power profiles callback.
1556 *
1557 * @rdev: radeon_device pointer
1558 *
1559 * Initialize the power states used in profile mode
1560 * (BTC, cayman).
1561 * Used for profile mode only.
1562 */
1563void btc_pm_init_profile(struct radeon_device *rdev)
1564{
1565        int idx;
1566
1567        /* default */
1568        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1569        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1570        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1571        rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1572        /* starting with BTC, there is one state that is used for both
1573         * MH and SH.  Difference is that we always use the high clock index for
1574         * mclk.
1575         */
1576        if (rdev->flags & RADEON_IS_MOBILITY)
1577                idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1578        else
1579                idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1580        /* low sh */
1581        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1582        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1583        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1584        rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1585        /* mid sh */
1586        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1587        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1588        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1589        rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1590        /* high sh */
1591        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1592        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1593        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1594        rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1595        /* low mh */
1596        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1597        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1598        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1599        rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1600        /* mid mh */
1601        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1602        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1603        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1604        rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1605        /* high mh */
1606        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1607        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1608        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1609        rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1610}
1611
1612/**
1613 * evergreen_pm_misc - set additional pm hw parameters callback.
1614 *
1615 * @rdev: radeon_device pointer
1616 *
1617 * Set non-clock parameters associated with a power state
1618 * (voltage, etc.) (evergreen+).
1619 */
1620void evergreen_pm_misc(struct radeon_device *rdev)
1621{
1622        int req_ps_idx = rdev->pm.requested_power_state_index;
1623        int req_cm_idx = rdev->pm.requested_clock_mode_index;
1624        struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1625        struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1626
1627        if (voltage->type == VOLTAGE_SW) {
1628                /* 0xff0x are flags rather then an actual voltage */
1629                if ((voltage->voltage & 0xff00) == 0xff00)
1630                        return;
1631                if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1632                        radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1633                        rdev->pm.current_vddc = voltage->voltage;
1634                        DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1635                }
1636
1637                /* starting with BTC, there is one state that is used for both
1638                 * MH and SH.  Difference is that we always use the high clock index for
1639                 * mclk and vddci.
1640                 */
1641                if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1642                    (rdev->family >= CHIP_BARTS) &&
1643                    rdev->pm.active_crtc_count &&
1644                    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1645                     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1646                        voltage = &rdev->pm.power_state[req_ps_idx].
1647                                clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1648
1649                /* 0xff0x are flags rather then an actual voltage */
1650                if ((voltage->vddci & 0xff00) == 0xff00)
1651                        return;
1652                if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1653                        radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1654                        rdev->pm.current_vddci = voltage->vddci;
1655                        DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1656                }
1657        }
1658}
1659
1660/**
1661 * evergreen_pm_prepare - pre-power state change callback.
1662 *
1663 * @rdev: radeon_device pointer
1664 *
1665 * Prepare for a power state change (evergreen+).
1666 */
1667void evergreen_pm_prepare(struct radeon_device *rdev)
1668{
1669        struct drm_device *ddev = rdev->ddev;
1670        struct drm_crtc *crtc;
1671        struct radeon_crtc *radeon_crtc;
1672        u32 tmp;
1673
1674        /* disable any active CRTCs */
1675        list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1676                radeon_crtc = to_radeon_crtc(crtc);
1677                if (radeon_crtc->enabled) {
1678                        tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1679                        tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1680                        WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1681                }
1682        }
1683}
1684
1685/**
1686 * evergreen_pm_finish - post-power state change callback.
1687 *
1688 * @rdev: radeon_device pointer
1689 *
1690 * Clean up after a power state change (evergreen+).
1691 */
1692void evergreen_pm_finish(struct radeon_device *rdev)
1693{
1694        struct drm_device *ddev = rdev->ddev;
1695        struct drm_crtc *crtc;
1696        struct radeon_crtc *radeon_crtc;
1697        u32 tmp;
1698
1699        /* enable any active CRTCs */
1700        list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1701                radeon_crtc = to_radeon_crtc(crtc);
1702                if (radeon_crtc->enabled) {
1703                        tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1704                        tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1705                        WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1706                }
1707        }
1708}
1709
1710/**
1711 * evergreen_hpd_sense - hpd sense callback.
1712 *
1713 * @rdev: radeon_device pointer
1714 * @hpd: hpd (hotplug detect) pin
1715 *
1716 * Checks if a digital monitor is connected (evergreen+).
1717 * Returns true if connected, false if not connected.
1718 */
1719bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1720{
1721        if (hpd == RADEON_HPD_NONE)
1722                return false;
1723
1724        return !!(RREG32(DC_HPDx_INT_STATUS_REG(hpd)) & DC_HPDx_SENSE);
1725}
1726
1727/**
1728 * evergreen_hpd_set_polarity - hpd set polarity callback.
1729 *
1730 * @rdev: radeon_device pointer
1731 * @hpd: hpd (hotplug detect) pin
1732 *
1733 * Set the polarity of the hpd pin (evergreen+).
1734 */
1735void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1736                                enum radeon_hpd_id hpd)
1737{
1738        bool connected = evergreen_hpd_sense(rdev, hpd);
1739
1740        if (hpd == RADEON_HPD_NONE)
1741                return;
1742
1743        if (connected)
1744                WREG32_AND(DC_HPDx_INT_CONTROL(hpd), ~DC_HPDx_INT_POLARITY);
1745        else
1746                WREG32_OR(DC_HPDx_INT_CONTROL(hpd), DC_HPDx_INT_POLARITY);
1747}
1748
1749/**
1750 * evergreen_hpd_init - hpd setup callback.
1751 *
1752 * @rdev: radeon_device pointer
1753 *
1754 * Setup the hpd pins used by the card (evergreen+).
1755 * Enable the pin, set the polarity, and enable the hpd interrupts.
1756 */
1757void evergreen_hpd_init(struct radeon_device *rdev)
1758{
1759        struct drm_device *dev = rdev->ddev;
1760        struct drm_connector *connector;
1761        unsigned enabled = 0;
1762        u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1763                DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1764
1765        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1766                enum radeon_hpd_id hpd =
1767                        to_radeon_connector(connector)->hpd.hpd;
1768
1769                if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1770                    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1771                        /* don't try to enable hpd on eDP or LVDS avoid breaking the
1772                         * aux dp channel on imac and help (but not completely fix)
1773                         * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1774                         * also avoid interrupt storms during dpms.
1775                         */
1776                        continue;
1777                }
1778
1779                if (hpd == RADEON_HPD_NONE)
1780                        continue;
1781
1782                WREG32(DC_HPDx_CONTROL(hpd), tmp);
1783                enabled |= 1 << hpd;
1784
1785                radeon_hpd_set_polarity(rdev, hpd);
1786        }
1787        radeon_irq_kms_enable_hpd(rdev, enabled);
1788}
1789
1790/**
1791 * evergreen_hpd_fini - hpd tear down callback.
1792 *
1793 * @rdev: radeon_device pointer
1794 *
1795 * Tear down the hpd pins used by the card (evergreen+).
1796 * Disable the hpd interrupts.
1797 */
1798void evergreen_hpd_fini(struct radeon_device *rdev)
1799{
1800        struct drm_device *dev = rdev->ddev;
1801        struct drm_connector *connector;
1802        unsigned disabled = 0;
1803
1804        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1805                enum radeon_hpd_id hpd =
1806                        to_radeon_connector(connector)->hpd.hpd;
1807
1808                if (hpd == RADEON_HPD_NONE)
1809                        continue;
1810
1811                WREG32(DC_HPDx_CONTROL(hpd), 0);
1812                disabled |= 1 << hpd;
1813        }
1814        radeon_irq_kms_disable_hpd(rdev, disabled);
1815}
1816
1817/* watermark setup */
1818
1819static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1820                                        struct radeon_crtc *radeon_crtc,
1821                                        struct drm_display_mode *mode,
1822                                        struct drm_display_mode *other_mode)
1823{
1824        u32 tmp, buffer_alloc, i;
1825        u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1826        /*
1827         * Line Buffer Setup
1828         * There are 3 line buffers, each one shared by 2 display controllers.
1829         * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1830         * the display controllers.  The paritioning is done via one of four
1831         * preset allocations specified in bits 2:0:
1832         * first display controller
1833         *  0 - first half of lb (3840 * 2)
1834         *  1 - first 3/4 of lb (5760 * 2)
1835         *  2 - whole lb (7680 * 2), other crtc must be disabled
1836         *  3 - first 1/4 of lb (1920 * 2)
1837         * second display controller
1838         *  4 - second half of lb (3840 * 2)
1839         *  5 - second 3/4 of lb (5760 * 2)
1840         *  6 - whole lb (7680 * 2), other crtc must be disabled
1841         *  7 - last 1/4 of lb (1920 * 2)
1842         */
1843        /* this can get tricky if we have two large displays on a paired group
1844         * of crtcs.  Ideally for multiple large displays we'd assign them to
1845         * non-linked crtcs for maximum line buffer allocation.
1846         */
1847        if (radeon_crtc->base.enabled && mode) {
1848                if (other_mode) {
1849                        tmp = 0; /* 1/2 */
1850                        buffer_alloc = 1;
1851                } else {
1852                        tmp = 2; /* whole */
1853                        buffer_alloc = 2;
1854                }
1855        } else {
1856                tmp = 0;
1857                buffer_alloc = 0;
1858        }
1859
1860        /* second controller of the pair uses second half of the lb */
1861        if (radeon_crtc->crtc_id % 2)
1862                tmp += 4;
1863        WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1864
1865        if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1866                WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1867                       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1868                for (i = 0; i < rdev->usec_timeout; i++) {
1869                        if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1870                            DMIF_BUFFERS_ALLOCATED_COMPLETED)
1871                                break;
1872                        udelay(1);
1873                }
1874        }
1875
1876        if (radeon_crtc->base.enabled && mode) {
1877                switch (tmp) {
1878                case 0:
1879                case 4:
1880                default:
1881                        if (ASIC_IS_DCE5(rdev))
1882                                return 4096 * 2;
1883                        else
1884                                return 3840 * 2;
1885                case 1:
1886                case 5:
1887                        if (ASIC_IS_DCE5(rdev))
1888                                return 6144 * 2;
1889                        else
1890                                return 5760 * 2;
1891                case 2:
1892                case 6:
1893                        if (ASIC_IS_DCE5(rdev))
1894                                return 8192 * 2;
1895                        else
1896                                return 7680 * 2;
1897                case 3:
1898                case 7:
1899                        if (ASIC_IS_DCE5(rdev))
1900                                return 2048 * 2;
1901                        else
1902                                return 1920 * 2;
1903                }
1904        }
1905
1906        /* controller not enabled, so no lb used */
1907        return 0;
1908}
1909
1910u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1911{
1912        u32 tmp = RREG32(MC_SHARED_CHMAP);
1913
1914        switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1915        case 0:
1916        default:
1917                return 1;
1918        case 1:
1919                return 2;
1920        case 2:
1921                return 4;
1922        case 3:
1923                return 8;
1924        }
1925}
1926
/* per-crtc inputs for the display watermark calculations below */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1942
1943static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1944{
1945        /* Calculate DRAM Bandwidth and the part allocated to display. */
1946        fixed20_12 dram_efficiency; /* 0.7 */
1947        fixed20_12 yclk, dram_channels, bandwidth;
1948        fixed20_12 a;
1949
1950        a.full = dfixed_const(1000);
1951        yclk.full = dfixed_const(wm->yclk);
1952        yclk.full = dfixed_div(yclk, a);
1953        dram_channels.full = dfixed_const(wm->dram_channels * 4);
1954        a.full = dfixed_const(10);
1955        dram_efficiency.full = dfixed_const(7);
1956        dram_efficiency.full = dfixed_div(dram_efficiency, a);
1957        bandwidth.full = dfixed_mul(dram_channels, yclk);
1958        bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1959
1960        return dfixed_trunc(bandwidth);
1961}
1962
1963static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1964{
1965        /* Calculate DRAM Bandwidth and the part allocated to display. */
1966        fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1967        fixed20_12 yclk, dram_channels, bandwidth;
1968        fixed20_12 a;
1969
1970        a.full = dfixed_const(1000);
1971        yclk.full = dfixed_const(wm->yclk);
1972        yclk.full = dfixed_div(yclk, a);
1973        dram_channels.full = dfixed_const(wm->dram_channels * 4);
1974        a.full = dfixed_const(10);
1975        disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1976        disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1977        bandwidth.full = dfixed_mul(dram_channels, yclk);
1978        bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1979
1980        return dfixed_trunc(bandwidth);
1981}
1982
1983static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1984{
1985        /* Calculate the display Data return Bandwidth */
1986        fixed20_12 return_efficiency; /* 0.8 */
1987        fixed20_12 sclk, bandwidth;
1988        fixed20_12 a;
1989
1990        a.full = dfixed_const(1000);
1991        sclk.full = dfixed_const(wm->sclk);
1992        sclk.full = dfixed_div(sclk, a);
1993        a.full = dfixed_const(10);
1994        return_efficiency.full = dfixed_const(8);
1995        return_efficiency.full = dfixed_div(return_efficiency, a);
1996        a.full = dfixed_const(32);
1997        bandwidth.full = dfixed_mul(a, sclk);
1998        bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1999
2000        return dfixed_trunc(bandwidth);
2001}
2002
2003static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2004{
2005        /* Calculate the DMIF Request Bandwidth */
2006        fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2007        fixed20_12 disp_clk, bandwidth;
2008        fixed20_12 a;
2009
2010        a.full = dfixed_const(1000);
2011        disp_clk.full = dfixed_const(wm->disp_clk);
2012        disp_clk.full = dfixed_div(disp_clk, a);
2013        a.full = dfixed_const(10);
2014        disp_clk_request_efficiency.full = dfixed_const(8);
2015        disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2016        a.full = dfixed_const(32);
2017        bandwidth.full = dfixed_mul(a, disp_clk);
2018        bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2019
2020        return dfixed_trunc(bandwidth);
2021}
2022
2023static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2024{
2025        /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2026        u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2027        u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2028        u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2029
2030        return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2031}
2032
2033static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2034{
2035        /* Calculate the display mode Average Bandwidth
2036         * DisplayMode should contain the source and destination dimensions,
2037         * timing, etc.
2038         */
2039        fixed20_12 bpp;
2040        fixed20_12 line_time;
2041        fixed20_12 src_width;
2042        fixed20_12 bandwidth;
2043        fixed20_12 a;
2044
2045        a.full = dfixed_const(1000);
2046        line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2047        line_time.full = dfixed_div(line_time, a);
2048        bpp.full = dfixed_const(wm->bytes_per_pixel);
2049        src_width.full = dfixed_const(wm->src_width);
2050        bandwidth.full = dfixed_mul(src_width, bpp);
2051        bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2052        bandwidth.full = dfixed_div(bandwidth, line_time);
2053
2054        return dfixed_trunc(bandwidth);
2055}
2056
/* Worst-case latency (in ns) the line buffer must be able to hide for
 * this head, given the shared bandwidth and the other active heads.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* time for a 512B*8 chunk to come back, in ns */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	/* time for a cursor line pair (128B*4) to come back, in ns */
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* other heads compete for the same return path */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* decide how many source lines feed one destination line, based on
	 * the vertical scale ratio, tap count and interlacing
	 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* lb fill rate is bounded by what the display clock can consume */
	lb_fill_bw = min(dfixed_trunc(a), wm->disp_clk * wm->bytes_per_pixel / 1000);

	/* time to fill the source lines for one destination line */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the line can't be filled within the active time, the overrun
	 * adds to the latency the lb must hide
	 */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2103
2104static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2105{
2106        if (evergreen_average_bandwidth(wm) <=
2107            (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2108                return true;
2109        else
2110                return false;
2111};
2112
2113static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2114{
2115        if (evergreen_average_bandwidth(wm) <=
2116            (evergreen_available_bandwidth(wm) / wm->num_heads))
2117                return true;
2118        else
2119                return false;
2120};
2121
2122static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2123{
2124        u32 lb_partitions = wm->lb_size / wm->src_width;
2125        u32 line_time = wm->active_time + wm->blank_time;
2126        u32 latency_tolerant_lines;
2127        u32 latency_hiding;
2128        fixed20_12 a;
2129
2130        a.full = dfixed_const(1);
2131        if (wm->vsc.full > a.full)
2132                latency_tolerant_lines = 1;
2133        else {
2134                if (lb_partitions <= (wm->vtaps + 1))
2135                        latency_tolerant_lines = 1;
2136                else
2137                        latency_tolerant_lines = 2;
2138        }
2139
2140        latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2141
2142        if (evergreen_latency_watermark(wm) <= latency_hiding)
2143                return true;
2144        else
2145                return false;
2146}
2147
/* Compute and program the latency watermarks and arbitration priority
 * marks for one CRTC, for both the high (A) and low (B) clock cases.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 active_time;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* active and total line times in ns */
		active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000,
					    (u32)mode->clock);
		line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000,
					  (u32)mode->clock);
		/* hw field is 16 bits */
		line_time = min(line_time, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = active_time;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = active_time;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* convert watermark A (ns) into a priority mark:
		 * lines = wm * pixel_clock * hsc / 16
		 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* same conversion for watermark B */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;

		/* Save number of lines the linebuffer leads before the scanout */
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2308
2309/**
2310 * evergreen_bandwidth_update - update display watermarks callback.
2311 *
2312 * @rdev: radeon_device pointer
2313 *
2314 * Update the display watermarks based on the requested mode(s)
2315 * (evergreen+).
2316 */
2317void evergreen_bandwidth_update(struct radeon_device *rdev)
2318{
2319        struct drm_display_mode *mode0 = NULL;
2320        struct drm_display_mode *mode1 = NULL;
2321        u32 num_heads = 0, lb_size;
2322        int i;
2323
2324        if (!rdev->mode_info.mode_config_initialized)
2325                return;
2326
2327        radeon_update_display_priority(rdev);
2328
2329        for (i = 0; i < rdev->num_crtc; i++) {
2330                if (rdev->mode_info.crtcs[i]->base.enabled)
2331                        num_heads++;
2332        }
2333        for (i = 0; i < rdev->num_crtc; i += 2) {
2334                mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2335                mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2336                lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2337                evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2338                lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2339                evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2340        }
2341}
2342
2343/**
2344 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2345 *
2346 * @rdev: radeon_device pointer
2347 *
2348 * Wait for the MC (memory controller) to be idle.
2349 * (evergreen+).
2350 * Returns 0 if the MC is idle, -1 if not.
2351 */
2352int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2353{
2354        unsigned i;
2355        u32 tmp;
2356
2357        for (i = 0; i < rdev->usec_timeout; i++) {
2358                /* read MC_STATUS */
2359                tmp = RREG32(SRBM_STATUS) & 0x1F00;
2360                if (!tmp)
2361                        return 0;
2362                udelay(1);
2363        }
2364        return -1;
2365}
2366
2367/*
2368 * GART
2369 */
/**
 * evergreen_pcie_gart_tlb_flush - flush the GART TLB
 *
 * @rdev: radeon_device pointer
 *
 * Flush the HDP cache, then request a VM context 0 page table TLB
 * invalidation and poll until the hardware acknowledges it (or the
 * usec timeout expires silently).
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush the HDP cache so the TLB refill sees current page tables */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* poll the invalidation response field (the old "read
		 * MC_STATUS" comment inherited from r600 was wrong) */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			/* response type 2 == flush failure */
			pr_warn("[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			/* any other non-zero response == done */
			return;
		}
		udelay(1);
	}
}
2392
/**
 * evergreen_pcie_gart_enable - program and enable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Pin the GART page table in VRAM, program the VM L2 cache and the L1
 * TLBs, set up VM context 0 to cover the GTT aperture and enable it.
 * Register programming order matters here.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion parts use the FUS_ register block for the MD TLBs */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these families have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context 0 covers the whole GTT range, page table in VRAM */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* out-of-range accesses are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2450
/**
 * evergreen_pcie_gart_disable - shut down the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Disable both VM contexts, reprogram the L2 cache and L1 TLBs to a
 * disabled state and unpin the GART page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache (note: ENABLE_L2_CACHE intentionally not set) */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control (ENABLE_L1_TLB intentionally not set) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2475
/**
 * evergreen_pcie_gart_fini - final GART teardown
 *
 * @rdev: radeon_device pointer
 *
 * Teardown order is fixed: disable the hardware GART first, then free
 * the page table VRAM object, then the generic gart state.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2482
2483
/**
 * evergreen_agp_enable - set up VM/TLB state for AGP operation
 *
 * @rdev: radeon_device pointer
 *
 * Enable the L2 cache and L1 TLBs but leave both VM contexts disabled,
 * since address translation is handled by the AGP aperture rather than
 * the on-chip page tables.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page tables in AGP mode; keep both VM contexts off */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2509
/* per-instance DIG encoder register block offsets, indexed by dig number */
static const unsigned ni_dig_offsets[] =
{
	NI_DIG0_REGISTER_OFFSET,
	NI_DIG1_REGISTER_OFFSET,
	NI_DIG2_REGISTER_OFFSET,
	NI_DIG3_REGISTER_OFFSET,
	NI_DIG4_REGISTER_OFFSET,
	NI_DIG5_REGISTER_OFFSET
};
2519
/* per-instance UNIPHY TX control register offsets, indexed by phy number */
static const unsigned ni_tx_offsets[] =
{
	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
};
2529
/* per-instance DP register block offsets, indexed by dig_fe number */
static const unsigned evergreen_dp_offsets[] =
{
	EVERGREEN_DP0_REGISTER_OFFSET,
	EVERGREEN_DP1_REGISTER_OFFSET,
	EVERGREEN_DP2_REGISTER_OFFSET,
	EVERGREEN_DP3_REGISTER_OFFSET,
	EVERGREEN_DP4_REGISTER_OFFSET,
	EVERGREEN_DP5_REGISTER_OFFSET
};
2539
/* display interrupt status registers, one per crtc pair */
static const unsigned evergreen_disp_int_status[] =
{
	DISP_INTERRUPT_STATUS,
	DISP_INTERRUPT_STATUS_CONTINUE,
	DISP_INTERRUPT_STATUS_CONTINUE2,
	DISP_INTERRUPT_STATUS_CONTINUE3,
	DISP_INTERRUPT_STATUS_CONTINUE4,
	DISP_INTERRUPT_STATUS_CONTINUE5
};
2549
2550/*
2551 * Assumption is that EVERGREEN_CRTC_MASTER_EN enable for requested crtc
2552 * We go from crtc to connector and it is not relible  since it
2553 * should be an opposite direction .If crtc is enable then
2554 * find the dig_fe which selects this crtc and insure that it enable.
2555 * if such dig_fe is found then find dig_be which selects found dig_be and
2556 * insure that it enable and in DP_SST mode.
2557 * if UNIPHY_PLL_CONTROL1.enable then we should disconnect timing
2558 * from dp symbols clocks .
2559 */
2560static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2561                                               unsigned crtc_id, unsigned *ret_dig_fe)
2562{
2563        unsigned i;
2564        unsigned dig_fe;
2565        unsigned dig_be;
2566        unsigned dig_en_be;
2567        unsigned uniphy_pll;
2568        unsigned digs_fe_selected;
2569        unsigned dig_be_mode;
2570        unsigned dig_fe_mask;
2571        bool is_enabled = false;
2572        bool found_crtc = false;
2573
2574        /* loop through all running dig_fe to find selected crtc */
2575        for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2576                dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2577                if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2578                    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2579                        /* found running pipe */
2580                        found_crtc = true;
2581                        dig_fe_mask = 1 << i;
2582                        dig_fe = i;
2583                        break;
2584                }
2585        }
2586
2587        if (found_crtc) {
2588                /* loop through all running dig_be to find selected dig_fe */
2589                for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2590                        dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
2591                        /* if dig_fe_selected by dig_be? */
2592                        digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
2593                        dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
2594                        if (dig_fe_mask &  digs_fe_selected &&
2595                            /* if dig_be in sst mode? */
2596                            dig_be_mode == NI_DIG_BE_DPSST) {
2597                                dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2598                                                   ni_dig_offsets[i]);
2599                                uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2600                                                    ni_tx_offsets[i]);
2601                                /* dig_be enable and tx is running */
2602                                if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2603                                    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2604                                    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
2605                                        is_enabled = true;
2606                                        *ret_dig_fe = dig_fe;
2607                                        break;
2608                                }
2609                        }
2610                }
2611        }
2612
2613        return is_enabled;
2614}
2615
2616/*
2617 * Blank dig when in dp sst mode
2618 * Dig ignores crtc timing
2619 */
2620static void evergreen_blank_dp_output(struct radeon_device *rdev,
2621                                      unsigned dig_fe)
2622{
2623        unsigned stream_ctrl;
2624        unsigned fifo_ctrl;
2625        unsigned counter = 0;
2626
2627        if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2628                DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2629                return;
2630        }
2631
2632        stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2633                             evergreen_dp_offsets[dig_fe]);
2634        if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2635                DRM_ERROR("dig %d , should be enable\n", dig_fe);
2636                return;
2637        }
2638
2639        stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2640        WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2641               evergreen_dp_offsets[dig_fe], stream_ctrl);
2642
2643        stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2644                             evergreen_dp_offsets[dig_fe]);
2645        while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2646                msleep(1);
2647                counter++;
2648                stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2649                                     evergreen_dp_offsets[dig_fe]);
2650        }
2651        if (counter >= 32 )
2652                DRM_ERROR("counter exceeds %d\n", counter);
2653
2654        fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2655        fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2656        WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2657
2658}
2659
/**
 * evergreen_mc_stop - stop the MC so VRAM placement can be changed
 *
 * @rdev: radeon_device pointer
 * @save: state to fill in so evergreen_mc_resume() can undo this
 *
 * Disable the VGA renderer, blank every active display controller,
 * blackout the MC and lock the double buffered display registers so
 * nothing latches while the framebuffer location is reprogrammed.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;
	unsigned dig_fe;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6+: blank by forcing black data out of the crtc */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop the crtc from issuing memory reads */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
			/* We should disable the dig if it drives a dp sst
			 * stream, but we are called from radeon_device_init()
			 * where the display topology is not known yet (it is
			 * only available after radeon_modeset_init()).
			 * radeon_atom_encoder_dpms_dig() would do the job once
			 * initialized properly; until then do it manually.
			 */
			if (ASIC_IS_DCE5(rdev) &&
			    evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
				evergreen_blank_dp_output(rdev, dig_fe);
			/* we could remove 6 lines below */
			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2757
/**
 * evergreen_mc_resume - restart the display after MC reprogramming
 *
 * @rdev: radeon_device pointer
 * @save: state filled in by a previous evergreen_mc_stop()
 *
 * Point the crtcs and the VGA aperture at the new VRAM base, unlock
 * the double buffered registers, unblackout the MC, re-enable the
 * crtcs that were running and restore the saved VGA state.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 0) {
				tmp &= ~0x7;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the surface update to latch */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6+: stop forcing black data out of the crtc */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: let the crtc fetch from memory again */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2845
/**
 * evergreen_mc_program - program the memory controller address layout
 *
 * @rdev: radeon_device pointer
 *
 * Stop the MC (via evergreen_mc_stop()), program the system aperture,
 * VRAM and AGP locations into the MC, then resume it.  The display
 * must be idle while the FB location changes.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must cover both VRAM and AGP, whichever
		 * order they sit in the address space */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: top 16 bits = end >> 24, bottom 16 = start >> 24 */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2922
2923/*
2924 * CP.
2925 */
/**
 * evergreen_ring_ib_execute - emit an indirect buffer on the gfx ring
 *
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to schedule
 *
 * Emit the CP packets that make the GPU fetch and execute @ib,
 * optionally writing the predicted rptr first so the ring tracker
 * stays current.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this SET_CONFIG_REG + 4 for the IB packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this MEM_WRITE + 4 for the IB packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2959
2960
/**
 * evergreen_cp_load_microcode - upload PFP and ME microcode to the CP
 *
 * @rdev: radeon_device pointer
 *
 * Stop the CP, then stream the prefetch-parser (PFP) and micro-engine
 * (ME) firmware words into the CP ucode RAMs and reset the address
 * registers.  Firmware images are stored big-endian.
 *
 * Returns 0 on success, -EINVAL if the firmware is not loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* upload the PFP ucode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* upload the ME ucode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2992
/**
 * evergreen_cp_start - prime the command processor
 *
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet, un-halts the micro engine, and then
 * submits the golden ("clear") context state for the gfx ring so newly
 * created contexts start from known register values.
 *
 * Returns 0 on success, negative error code if the ring cannot be locked.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
        struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
        int r, i;
        uint32_t cp_me;

        /* ME_INITIALIZE: header + 6 dwords = 7 ring dwords */
        r = radeon_ring_lock(rdev, ring, 7);
        if (r) {
                DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
                return r;
        }
        radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
        radeon_ring_write(ring, 0x1);
        radeon_ring_write(ring, 0x0);
        radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
        radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
        radeon_ring_write(ring, 0);
        radeon_ring_write(ring, 0);
        radeon_ring_unlock_commit(rdev, ring, false);

        /* NOTE(review): presumably releases the ME from the halted state —
         * confirm against the CP_ME_CNTL bit definitions in evergreend.h
         */
        cp_me = 0xff;
        WREG32(CP_ME_CNTL, cp_me);

        /* 19 extra dwords: 2x preamble (4) + clear state (2) +
         * SQ_VTX_BASE_VTX_LOC (4) + clear consts (5) + VGT reuse/dealloc (4)
         */
        r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
        if (r) {
                DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
                return r;
        }

        /* setup clear context state */
        radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
        radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

        /* golden register state generated for this asic family */
        for (i = 0; i < evergreen_default_size; i++)
                radeon_ring_write(ring, evergreen_default_state[i]);

        radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
        radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

        /* set clear context state */
        radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
        radeon_ring_write(ring, 0);

        /* SQ_VTX_BASE_VTX_LOC */
        radeon_ring_write(ring, 0xc0026f00);
        radeon_ring_write(ring, 0x00000000);
        radeon_ring_write(ring, 0x00000000);
        radeon_ring_write(ring, 0x00000000);

        /* Clear consts */
        radeon_ring_write(ring, 0xc0036f00);
        radeon_ring_write(ring, 0x00000bc4);
        radeon_ring_write(ring, 0xffffffff);
        radeon_ring_write(ring, 0xffffffff);
        radeon_ring_write(ring, 0xffffffff);

        /* SET_CONTEXT_REG, 2 values starting at context reg offset 0x316 */
        radeon_ring_write(ring, 0xc0026900);
        radeon_ring_write(ring, 0x00000316);
        radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
        radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL (reg 0x317) */

        radeon_ring_unlock_commit(rdev, ring, false);

        return 0;
}
3058
3059static int evergreen_cp_resume(struct radeon_device *rdev)
3060{
3061        struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3062        u32 tmp;
3063        u32 rb_bufsz;
3064        int r;
3065
3066        /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3067        WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
3068                                 SOFT_RESET_PA |
3069                                 SOFT_RESET_SH |
3070                                 SOFT_RESET_VGT |
3071                                 SOFT_RESET_SPI |
3072                                 SOFT_RESET_SX));
3073        RREG32(GRBM_SOFT_RESET);
3074        mdelay(15);
3075        WREG32(GRBM_SOFT_RESET, 0);
3076        RREG32(GRBM_SOFT_RESET);
3077
3078        /* Set ring buffer size */
3079        rb_bufsz = order_base_2(ring->ring_size / 8);
3080        tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3081#ifdef __BIG_ENDIAN
3082        tmp |= BUF_SWAP_32BIT;
3083#endif
3084        WREG32(CP_RB_CNTL, tmp);
3085        WREG32(CP_SEM_WAIT_TIMER, 0x0);
3086        WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3087
3088        /* Set the write pointer delay */
3089        WREG32(CP_RB_WPTR_DELAY, 0);
3090
3091        /* Initialize the ring buffer's read and write pointers */
3092        WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3093        WREG32(CP_RB_RPTR_WR, 0);
3094        ring->wptr = 0;
3095        WREG32(CP_RB_WPTR, ring->wptr);
3096
3097        /* set the wb address whether it's enabled or not */
3098        WREG32(CP_RB_RPTR_ADDR,
3099               ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3100        WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3101        WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3102
3103        if (rdev->wb.enabled)
3104                WREG32(SCRATCH_UMSK, 0xff);
3105        else {
3106                tmp |= RB_NO_UPDATE;
3107                WREG32(SCRATCH_UMSK, 0);
3108        }
3109
3110        mdelay(1);
3111        WREG32(CP_RB_CNTL, tmp);
3112
3113        WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3114        WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3115
3116        evergreen_cp_start(rdev);
3117        ring->ready = true;
3118        r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3119        if (r) {
3120                ring->ready = false;
3121                return r;
3122        }
3123        return 0;
3124}
3125
3126/*
3127 * Core functions
3128 */
3129static void evergreen_gpu_init(struct radeon_device *rdev)
3130{
3131        u32 gb_addr_config;
3132        u32 mc_shared_chmap, mc_arb_ramcfg;
3133        u32 sx_debug_1;
3134        u32 smx_dc_ctl0;
3135        u32 sq_config;
3136        u32 sq_lds_resource_mgmt;
3137        u32 sq_gpr_resource_mgmt_1;
3138        u32 sq_gpr_resource_mgmt_2;
3139        u32 sq_gpr_resource_mgmt_3;
3140        u32 sq_thread_resource_mgmt;
3141        u32 sq_thread_resource_mgmt_2;
3142        u32 sq_stack_resource_mgmt_1;
3143        u32 sq_stack_resource_mgmt_2;
3144        u32 sq_stack_resource_mgmt_3;
3145        u32 vgt_cache_invalidation;
3146        u32 hdp_host_path_cntl, tmp;
3147        u32 disabled_rb_mask;
3148        int i, j, ps_thread_count;
3149
3150        switch (rdev->family) {
3151        case CHIP_CYPRESS:
3152        case CHIP_HEMLOCK:
3153                rdev->config.evergreen.num_ses = 2;
3154                rdev->config.evergreen.max_pipes = 4;
3155                rdev->config.evergreen.max_tile_pipes = 8;
3156                rdev->config.evergreen.max_simds = 10;
3157                rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3158                rdev->config.evergreen.max_gprs = 256;
3159                rdev->config.evergreen.max_threads = 248;
3160                rdev->config.evergreen.max_gs_threads = 32;
3161                rdev->config.evergreen.max_stack_entries = 512;
3162                rdev->config.evergreen.sx_num_of_sets = 4;
3163                rdev->config.evergreen.sx_max_export_size = 256;
3164                rdev->config.evergreen.sx_max_export_pos_size = 64;
3165                rdev->config.evergreen.sx_max_export_smx_size = 192;
3166                rdev->config.evergreen.max_hw_contexts = 8;
3167                rdev->config.evergreen.sq_num_cf_insts = 2;
3168
3169                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3170                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3171                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3172                gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3173                break;
3174        case CHIP_JUNIPER:
3175                rdev->config.evergreen.num_ses = 1;
3176                rdev->config.evergreen.max_pipes = 4;
3177                rdev->config.evergreen.max_tile_pipes = 4;
3178                rdev->config.evergreen.max_simds = 10;
3179                rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3180                rdev->config.evergreen.max_gprs = 256;
3181                rdev->config.evergreen.max_threads = 248;
3182                rdev->config.evergreen.max_gs_threads = 32;
3183                rdev->config.evergreen.max_stack_entries = 512;
3184                rdev->config.evergreen.sx_num_of_sets = 4;
3185                rdev->config.evergreen.sx_max_export_size = 256;
3186                rdev->config.evergreen.sx_max_export_pos_size = 64;
3187                rdev->config.evergreen.sx_max_export_smx_size = 192;
3188                rdev->config.evergreen.max_hw_contexts = 8;
3189                rdev->config.evergreen.sq_num_cf_insts = 2;
3190
3191                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3192                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3193                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3194                gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3195                break;
3196        case CHIP_REDWOOD:
3197                rdev->config.evergreen.num_ses = 1;
3198                rdev->config.evergreen.max_pipes = 4;
3199                rdev->config.evergreen.max_tile_pipes = 4;
3200                rdev->config.evergreen.max_simds = 5;
3201                rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3202                rdev->config.evergreen.max_gprs = 256;
3203                rdev->config.evergreen.max_threads = 248;
3204                rdev->config.evergreen.max_gs_threads = 32;
3205                rdev->config.evergreen.max_stack_entries = 256;
3206                rdev->config.evergreen.sx_num_of_sets = 4;
3207                rdev->config.evergreen.sx_max_export_size = 256;
3208                rdev->config.evergreen.sx_max_export_pos_size = 64;
3209                rdev->config.evergreen.sx_max_export_smx_size = 192;
3210                rdev->config.evergreen.max_hw_contexts = 8;
3211                rdev->config.evergreen.sq_num_cf_insts = 2;
3212
3213                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3214                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3215                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3216                gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3217                break;
3218        case CHIP_CEDAR:
3219        default:
3220                rdev->config.evergreen.num_ses = 1;
3221                rdev->config.evergreen.max_pipes = 2;
3222                rdev->config.evergreen.max_tile_pipes = 2;
3223                rdev->config.evergreen.max_simds = 2;
3224                rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3225                rdev->config.evergreen.max_gprs = 256;
3226                rdev->config.evergreen.max_threads = 192;
3227                rdev->config.evergreen.max_gs_threads = 16;
3228                rdev->config.evergreen.max_stack_entries = 256;
3229                rdev->config.evergreen.sx_num_of_sets = 4;
3230                rdev->config.evergreen.sx_max_export_size = 128;
3231                rdev->config.evergreen.sx_max_export_pos_size = 32;
3232                rdev->config.evergreen.sx_max_export_smx_size = 96;
3233                rdev->config.evergreen.max_hw_contexts = 4;
3234                rdev->config.evergreen.sq_num_cf_insts = 1;
3235
3236                rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3237                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3238                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3239                gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3240                break;
3241        case CHIP_PALM:
3242                rdev->config.evergreen.num_ses = 1;
3243                rdev->config.evergreen.max_pipes = 2;
3244                rdev->config.evergreen.max_tile_pipes = 2;
3245                rdev->config.evergreen.max_simds = 2;
3246                rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3247                rdev->config.evergreen.max_gprs = 256;
3248                rdev->config.evergreen.max_threads = 192;
3249                rdev->config.evergreen.max_gs_threads = 16;
3250                rdev->config.evergreen.max_stack_entries = 256;
3251                rdev->config.evergreen.sx_num_of_sets = 4;
3252                rdev->config.evergreen.sx_max_export_size = 128;
3253                rdev->config.evergreen.sx_max_export_pos_size = 32;
3254                rdev->config.evergreen.sx_max_export_smx_size = 96;
3255                rdev->config.evergreen.max_hw_contexts = 4;
3256                rdev->config.evergreen.sq_num_cf_insts = 1;
3257
3258                rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3259                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3260                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3261                gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3262                break;
3263        case CHIP_SUMO:
3264                rdev->config.evergreen.num_ses = 1;
3265                rdev->config.evergreen.max_pipes = 4;
3266                rdev->config.evergreen.max_tile_pipes = 4;
3267                if (rdev->pdev->device == 0x9648)
3268                        rdev->config.evergreen.max_simds = 3;
3269                else if ((rdev->pdev->device == 0x9647) ||
3270                         (rdev->pdev->device == 0x964a))
3271                        rdev->config.evergreen.max_simds = 4;
3272                else
3273                        rdev->config.evergreen.max_simds = 5;
3274                rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3275                rdev->config.evergreen.max_gprs = 256;
3276                rdev->config.evergreen.max_threads = 248;
3277                rdev->config.evergreen.max_gs_threads = 32;
3278                rdev->config.evergreen.max_stack_entries = 256;
3279                rdev->config.evergreen.sx_num_of_sets = 4;
3280                rdev->config.evergreen.sx_max_export_size = 256;
3281                rdev->config.evergreen.sx_max_export_pos_size = 64;
3282                rdev->config.evergreen.sx_max_export_smx_size = 192;
3283                rdev->config.evergreen.max_hw_contexts = 8;
3284                rdev->config.evergreen.sq_num_cf_insts = 2;
3285
3286                rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3287                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3288                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3289                gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3290                break;
3291        case CHIP_SUMO2:
3292                rdev->config.evergreen.num_ses = 1;
3293                rdev->config.evergreen.max_pipes = 4;
3294                rdev->config.evergreen.max_tile_pipes = 4;
3295                rdev->config.evergreen.max_simds = 2;
3296                rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3297                rdev->config.evergreen.max_gprs = 256;
3298                rdev->config.evergreen.max_threads = 248;
3299                rdev->config.evergreen.max_gs_threads = 32;
3300                rdev->config.evergreen.max_stack_entries = 512;
3301                rdev->config.evergreen.sx_num_of_sets = 4;
3302                rdev->config.evergreen.sx_max_export_size = 256;
3303                rdev->config.evergreen.sx_max_export_pos_size = 64;
3304                rdev->config.evergreen.sx_max_export_smx_size = 192;
3305                rdev->config.evergreen.max_hw_contexts = 4;
3306                rdev->config.evergreen.sq_num_cf_insts = 2;
3307
3308                rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3309                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3310                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3311                gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3312                break;
3313        case CHIP_BARTS:
3314                rdev->config.evergreen.num_ses = 2;
3315                rdev->config.evergreen.max_pipes = 4;
3316                rdev->config.evergreen.max_tile_pipes = 8;
3317                rdev->config.evergreen.max_simds = 7;
3318                rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3319                rdev->config.evergreen.max_gprs = 256;
3320                rdev->config.evergreen.max_threads = 248;
3321                rdev->config.evergreen.max_gs_threads = 32;
3322                rdev->config.evergreen.max_stack_entries = 512;
3323                rdev->config.evergreen.sx_num_of_sets = 4;
3324                rdev->config.evergreen.sx_max_export_size = 256;
3325                rdev->config.evergreen.sx_max_export_pos_size = 64;
3326                rdev->config.evergreen.sx_max_export_smx_size = 192;
3327                rdev->config.evergreen.max_hw_contexts = 8;
3328                rdev->config.evergreen.sq_num_cf_insts = 2;
3329
3330                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3331                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3332                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3333                gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3334                break;
3335        case CHIP_TURKS:
3336                rdev->config.evergreen.num_ses = 1;
3337                rdev->config.evergreen.max_pipes = 4;
3338                rdev->config.evergreen.max_tile_pipes = 4;
3339                rdev->config.evergreen.max_simds = 6;
3340                rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3341                rdev->config.evergreen.max_gprs = 256;
3342                rdev->config.evergreen.max_threads = 248;
3343                rdev->config.evergreen.max_gs_threads = 32;
3344                rdev->config.evergreen.max_stack_entries = 256;
3345                rdev->config.evergreen.sx_num_of_sets = 4;
3346                rdev->config.evergreen.sx_max_export_size = 256;
3347                rdev->config.evergreen.sx_max_export_pos_size = 64;
3348                rdev->config.evergreen.sx_max_export_smx_size = 192;
3349                rdev->config.evergreen.max_hw_contexts = 8;
3350                rdev->config.evergreen.sq_num_cf_insts = 2;
3351
3352                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3353                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3354                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3355                gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3356                break;
3357        case CHIP_CAICOS:
3358                rdev->config.evergreen.num_ses = 1;
3359                rdev->config.evergreen.max_pipes = 2;
3360                rdev->config.evergreen.max_tile_pipes = 2;
3361                rdev->config.evergreen.max_simds = 2;
3362                rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3363                rdev->config.evergreen.max_gprs = 256;
3364                rdev->config.evergreen.max_threads = 192;
3365                rdev->config.evergreen.max_gs_threads = 16;
3366                rdev->config.evergreen.max_stack_entries = 256;
3367                rdev->config.evergreen.sx_num_of_sets = 4;
3368                rdev->config.evergreen.sx_max_export_size = 128;
3369                rdev->config.evergreen.sx_max_export_pos_size = 32;
3370                rdev->config.evergreen.sx_max_export_smx_size = 96;
3371                rdev->config.evergreen.max_hw_contexts = 4;
3372                rdev->config.evergreen.sq_num_cf_insts = 1;
3373
3374                rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3375                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3376                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3377                gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3378                break;
3379        }
3380
3381        /* Initialize HDP */
3382        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3383                WREG32((0x2c14 + j), 0x00000000);
3384                WREG32((0x2c18 + j), 0x00000000);
3385                WREG32((0x2c1c + j), 0x00000000);
3386                WREG32((0x2c20 + j), 0x00000000);
3387                WREG32((0x2c24 + j), 0x00000000);
3388        }
3389
3390        WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3391        WREG32(SRBM_INT_CNTL, 0x1);
3392        WREG32(SRBM_INT_ACK, 0x1);
3393
3394        evergreen_fix_pci_max_read_req_size(rdev);
3395
3396        mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3397        if ((rdev->family == CHIP_PALM) ||
3398            (rdev->family == CHIP_SUMO) ||
3399            (rdev->family == CHIP_SUMO2))
3400                mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3401        else
3402                mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3403
3404        /* setup tiling info dword.  gb_addr_config is not adequate since it does
3405         * not have bank info, so create a custom tiling dword.
3406         * bits 3:0   num_pipes
3407         * bits 7:4   num_banks
3408         * bits 11:8  group_size
3409         * bits 15:12 row_size
3410         */
3411        rdev->config.evergreen.tile_config = 0;
3412        switch (rdev->config.evergreen.max_tile_pipes) {
3413        case 1:
3414        default:
3415                rdev->config.evergreen.tile_config |= (0 << 0);
3416                break;
3417        case 2:
3418                rdev->config.evergreen.tile_config |= (1 << 0);
3419                break;
3420        case 4:
3421                rdev->config.evergreen.tile_config |= (2 << 0);
3422                break;
3423        case 8:
3424                rdev->config.evergreen.tile_config |= (3 << 0);
3425                break;
3426        }
3427        /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3428        if (rdev->flags & RADEON_IS_IGP)
3429                rdev->config.evergreen.tile_config |= 1 << 4;
3430        else {
3431                switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3432                case 0: /* four banks */
3433                        rdev->config.evergreen.tile_config |= 0 << 4;
3434                        break;
3435                case 1: /* eight banks */
3436                        rdev->config.evergreen.tile_config |= 1 << 4;
3437                        break;
3438                case 2: /* sixteen banks */
3439                default:
3440                        rdev->config.evergreen.tile_config |= 2 << 4;
3441                        break;
3442                }
3443        }
3444        rdev->config.evergreen.tile_config |= 0 << 8;
3445        rdev->config.evergreen.tile_config |=
3446                ((gb_addr_config & 0x30000000) >> 28) << 12;
3447
3448        if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3449                u32 efuse_straps_4;
3450                u32 efuse_straps_3;
3451
3452                efuse_straps_4 = RREG32_RCU(0x204);
3453                efuse_straps_3 = RREG32_RCU(0x203);
3454                tmp = (((efuse_straps_4 & 0xf) << 4) |
3455                      ((efuse_straps_3 & 0xf0000000) >> 28));
3456        } else {
3457                tmp = 0;
3458                for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3459                        u32 rb_disable_bitmap;
3460
3461                        WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3462                        WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3463                        rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3464                        tmp <<= 4;
3465                        tmp |= rb_disable_bitmap;
3466                }
3467        }
3468        /* enabled rb are just the one not disabled :) */
3469        disabled_rb_mask = tmp;
3470        tmp = 0;
3471        for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3472                tmp |= (1 << i);
3473        /* if all the backends are disabled, fix it up here */
3474        if ((disabled_rb_mask & tmp) == tmp) {
3475                for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3476                        disabled_rb_mask &= ~(1 << i);
3477        }
3478
3479        for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3480                u32 simd_disable_bitmap;
3481
3482                WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3483                WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3484                simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3485                simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3486                tmp <<= 16;
3487                tmp |= simd_disable_bitmap;
3488        }
3489        rdev->config.evergreen.active_simds = hweight32(~tmp);
3490
3491        WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3492        WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3493
3494        WREG32(GB_ADDR_CONFIG, gb_addr_config);
3495        WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3496        WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3497        WREG32(DMA_TILING_CONFIG, gb_addr_config);
3498        WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3499        WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3500        WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3501
3502        if ((rdev->config.evergreen.max_backends == 1) &&
3503            (rdev->flags & RADEON_IS_IGP)) {
3504                if ((disabled_rb_mask & 3) == 1) {
3505                        /* RB0 disabled, RB1 enabled */
3506                        tmp = 0x11111111;
3507                } else {
3508                        /* RB1 disabled, RB0 enabled */
3509                        tmp = 0x00000000;
3510                }
3511        } else {
3512                tmp = gb_addr_config & NUM_PIPES_MASK;
3513                tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3514                                                EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3515        }
3516        WREG32(GB_BACKEND_MAP, tmp);
3517
3518        WREG32(CGTS_SYS_TCC_DISABLE, 0);
3519        WREG32(CGTS_TCC_DISABLE, 0);
3520        WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3521        WREG32(CGTS_USER_TCC_DISABLE, 0);
3522
3523        /* set HW defaults for 3D engine */
3524        WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3525                                     ROQ_IB2_START(0x2b)));
3526
3527        WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3528
3529        WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3530                             SYNC_GRADIENT |
3531                             SYNC_WALKER |
3532                             SYNC_ALIGNER));
3533
3534        sx_debug_1 = RREG32(SX_DEBUG_1);
3535        sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3536        WREG32(SX_DEBUG_1, sx_debug_1);
3537
3538
3539        smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3540        smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3541        smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3542        WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3543
3544        if (rdev->family <= CHIP_SUMO2)
3545                WREG32(SMX_SAR_CTL0, 0x00010000);
3546
3547        WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3548                                        POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3549                                        SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3550
3551        WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3552                                 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3553                                 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3554
3555        WREG32(VGT_NUM_INSTANCES, 1);
3556        WREG32(SPI_CONFIG_CNTL, 0);
3557        WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3558        WREG32(CP_PERFMON_CNTL, 0);
3559
3560        WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3561                                  FETCH_FIFO_HIWATER(0x4) |
3562                                  DONE_FIFO_HIWATER(0xe0) |
3563                                  ALU_UPDATE_FIFO_HIWATER(0x8)));
3564
3565        sq_config = RREG32(SQ_CONFIG);
3566        sq_config &= ~(PS_PRIO(3) |
3567                       VS_PRIO(3) |
3568                       GS_PRIO(3) |
3569                       ES_PRIO(3));
3570        sq_config |= (VC_ENABLE |
3571                      EXPORT_SRC_C |
3572                      PS_PRIO(0) |
3573                      VS_PRIO(1) |
3574                      GS_PRIO(2) |
3575                      ES_PRIO(3));
3576
3577        switch (rdev->family) {
3578        case CHIP_CEDAR:
3579        case CHIP_PALM:
3580        case CHIP_SUMO:
3581        case CHIP_SUMO2:
3582        case CHIP_CAICOS:
3583                /* no vertex cache */
3584                sq_config &= ~VC_ENABLE;
3585                break;
3586        default:
3587                break;
3588        }
3589
3590        sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3591
3592        sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3593        sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3594        sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3595        sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3596        sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3597        sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3598        sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3599
3600        switch (rdev->family) {
3601        case CHIP_CEDAR:
3602        case CHIP_PALM:
3603        case CHIP_SUMO:
3604        case CHIP_SUMO2:
3605                ps_thread_count = 96;
3606                break;
3607        default:
3608                ps_thread_count = 128;
3609                break;
3610        }
3611
3612        sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3613        sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3614        sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3615        sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3616        sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3617        sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3618
3619        sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3620        sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3621        sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3622        sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3623        sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3624        sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3625
3626        WREG32(SQ_CONFIG, sq_config);
3627        WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3628        WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3629        WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3630        WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3631        WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3632        WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3633        WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3634        WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3635        WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3636        WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3637
3638        WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3639                                          FORCE_EOV_MAX_REZ_CNT(255)));
3640
3641        switch (rdev->family) {
3642        case CHIP_CEDAR:
3643        case CHIP_PALM:
3644        case CHIP_SUMO:
3645        case CHIP_SUMO2:
3646        case CHIP_CAICOS:
3647                vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3648                break;
3649        default:
3650                vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3651                break;
3652        }
3653        vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3654        WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3655
3656        WREG32(VGT_GS_VERTEX_REUSE, 16);
3657        WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3658        WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3659
3660        WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3661        WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3662
3663        WREG32(CB_PERF_CTR0_SEL_0, 0);
3664        WREG32(CB_PERF_CTR0_SEL_1, 0);
3665        WREG32(CB_PERF_CTR1_SEL_0, 0);
3666        WREG32(CB_PERF_CTR1_SEL_1, 0);
3667        WREG32(CB_PERF_CTR2_SEL_0, 0);
3668        WREG32(CB_PERF_CTR2_SEL_1, 0);
3669        WREG32(CB_PERF_CTR3_SEL_0, 0);
3670        WREG32(CB_PERF_CTR3_SEL_1, 0);
3671
3672        /* clear render buffer base addresses */
3673        WREG32(CB_COLOR0_BASE, 0);
3674        WREG32(CB_COLOR1_BASE, 0);
3675        WREG32(CB_COLOR2_BASE, 0);
3676        WREG32(CB_COLOR3_BASE, 0);
3677        WREG32(CB_COLOR4_BASE, 0);
3678        WREG32(CB_COLOR5_BASE, 0);
3679        WREG32(CB_COLOR6_BASE, 0);
3680        WREG32(CB_COLOR7_BASE, 0);
3681        WREG32(CB_COLOR8_BASE, 0);
3682        WREG32(CB_COLOR9_BASE, 0);
3683        WREG32(CB_COLOR10_BASE, 0);
3684        WREG32(CB_COLOR11_BASE, 0);
3685
3686        /* set the shader const cache sizes to 0 */
3687        for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3688                WREG32(i, 0);
3689        for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3690                WREG32(i, 0);
3691
3692        tmp = RREG32(HDP_MISC_CNTL);
3693        tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3694        WREG32(HDP_MISC_CNTL, tmp);
3695
3696        hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3697        WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3698
3699        WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3700
3701        udelay(50);
3702
3703}
3704
3705int evergreen_mc_init(struct radeon_device *rdev)
3706{
3707        u32 tmp;
3708        int chansize, numchan;
3709
3710        /* Get VRAM informations */
3711        rdev->mc.vram_is_ddr = true;
3712        if ((rdev->family == CHIP_PALM) ||
3713            (rdev->family == CHIP_SUMO) ||
3714            (rdev->family == CHIP_SUMO2))
3715                tmp = RREG32(FUS_MC_ARB_RAMCFG);
3716        else
3717                tmp = RREG32(MC_ARB_RAMCFG);
3718        if (tmp & CHANSIZE_OVERRIDE) {
3719                chansize = 16;
3720        } else if (tmp & CHANSIZE_MASK) {
3721                chansize = 64;
3722        } else {
3723                chansize = 32;
3724        }
3725        tmp = RREG32(MC_SHARED_CHMAP);
3726        switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3727        case 0:
3728        default:
3729                numchan = 1;
3730                break;
3731        case 1:
3732                numchan = 2;
3733                break;
3734        case 2:
3735                numchan = 4;
3736                break;
3737        case 3:
3738                numchan = 8;
3739                break;
3740        }
3741        rdev->mc.vram_width = numchan * chansize;
3742        /* Could aper size report 0 ? */
3743        rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3744        rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3745        /* Setup GPU memory space */
3746        if ((rdev->family == CHIP_PALM) ||
3747            (rdev->family == CHIP_SUMO) ||
3748            (rdev->family == CHIP_SUMO2)) {
3749                /* size in bytes on fusion */
3750                rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3751                rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3752        } else {
3753                /* size in MB on evergreen/cayman/tn */
3754                rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3755                rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3756        }
3757        rdev->mc.visible_vram_size = rdev->mc.aper_size;
3758        r700_vram_gtt_location(rdev, &rdev->mc);
3759        radeon_update_bandwidth_info(rdev);
3760
3761        return 0;
3762}
3763
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers to the log
 *
 * @rdev: radeon_device pointer
 *
 * Prints the GRBM, SRBM, CP and DMA status registers via dev_info() to
 * help diagnose GPU hangs.  Cayman and newer parts have a second DMA
 * engine (at a +0x800 register offset), whose status is printed too.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine lives 0x800 bytes above the first */
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3791
3792bool evergreen_is_display_hung(struct radeon_device *rdev)
3793{
3794        u32 crtc_hung = 0;
3795        u32 crtc_status[6];
3796        u32 i, j, tmp;
3797
3798        for (i = 0; i < rdev->num_crtc; i++) {
3799                if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3800                        crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3801                        crtc_hung |= (1 << i);
3802                }
3803        }
3804
3805        for (j = 0; j < 10; j++) {
3806                for (i = 0; i < rdev->num_crtc; i++) {
3807                        if (crtc_hung & (1 << i)) {
3808                                tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3809                                if (tmp != crtc_status[i])
3810                                        crtc_hung &= ~(1 << i);
3811                        }
3812                }
3813                if (crtc_hung == 0)
3814                        return false;
3815                udelay(100);
3816        }
3817
3818        return true;
3819}
3820
/**
 * evergreen_gpu_check_soft_reset - check which GPU blocks are busy/hung
 *
 * @rdev: radeon_device pointer
 *
 * Inspects the GRBM, DMA, SRBM and VM L2 status registers and the
 * display controllers, and builds a mask of RADEON_RESET_* flags for
 * every block that appears to be stuck.  A return value of 0 means the
 * GPU looks idle and no soft reset is needed.
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS: graphics pipeline and CP busy bits */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS: RLC, IH, semaphore, GRBM, VM and MC busy bits */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3889
/**
 * evergreen_gpu_soft_reset - soft reset the GPU blocks named in @reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags for the blocks to reset
 *
 * Halts the CP (and the DMA engine if its reset was requested), stops
 * MC client access, then pulses the corresponding GRBM/SRBM soft reset
 * bits (set, wait, clear).  Does nothing if @reset_mask is 0.  The
 * ordering of the hardware accesses below is significant.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop MC client activity before touching the reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the RADEON_RESET_* mask into GRBM/SRBM reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC soft reset is only done on dGPUs; IGPs skip it */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		/* assert the reset bits, wait, then release them; the
		 * RREG32 after each write looks like a posting read —
		 * NOTE(review): confirm against register programming docs */
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
4003
/**
 * evergreen_gpu_pci_config_reset - reset the asic via a PCI config reset
 *
 * @rdev: radeon_device pointer
 *
 * Quiesces the CP, DMA engine and RLC, switches clocks to bypass mode,
 * disables bus mastering and MC access, then triggers a PCI config
 * space reset and busy-waits until CONFIG_MEMSIZE reads back sane,
 * which indicates the asic has come out of reset.
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* all-ones means the register space is still inaccessible */
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
4045
4046int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4047{
4048        u32 reset_mask;
4049
4050        if (hard) {
4051                evergreen_gpu_pci_config_reset(rdev);
4052                return 0;
4053        }
4054
4055        reset_mask = evergreen_gpu_check_soft_reset(rdev);
4056
4057        if (reset_mask)
4058                r600_set_bios_scratch_engine_hung(rdev, true);
4059
4060        /* try soft reset */
4061        evergreen_gpu_soft_reset(rdev, reset_mask);
4062
4063        reset_mask = evergreen_gpu_check_soft_reset(rdev);
4064
4065        /* try pci config reset */
4066        if (reset_mask && radeon_hard_reset)
4067                evergreen_gpu_pci_config_reset(rdev);
4068
4069        reset_mask = evergreen_gpu_check_soft_reset(rdev);
4070
4071        if (!reset_mask)
4072                r600_set_bios_scratch_engine_hung(rdev, false);
4073
4074        return 0;
4075}
4076
4077/**
4078 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4079 *
4080 * @rdev: radeon_device pointer
4081 * @ring: radeon_ring structure holding ring information
4082 *
4083 * Check if the GFX engine is locked up.
4084 * Returns true if the engine appears to be locked up, false if not.
4085 */
4086bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4087{
4088        u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4089
4090        if (!(reset_mask & (RADEON_RESET_GFX |
4091                            RADEON_RESET_COMPUTE |
4092                            RADEON_RESET_CP))) {
4093                radeon_ring_lockup_update(rdev, ring);
4094                return false;
4095        }
4096        return radeon_ring_test_lockup(rdev, ring);
4097}
4098
4099/*
4100 * RLC
4101 */
4102#define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4103#define RLC_CLEAR_STATE_END_MARKER          0x00000001
4104
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins and frees the RLC save/restore, clear state and CP table
 * buffer objects, if present.  Safe to call on a partially initialized
 * RLC (NULL objects are skipped), so it doubles as the error-unwind
 * path for sumo_rlc_init().
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block (comment previously said "clear state block") */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
4145
4146#define CP_ME_TABLE_SIZE    96
4147
/**
 * sumo_rlc_init - allocate and fill the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Allocates, pins and populates up to three VRAM buffer objects used
 * by the RLC, depending on what the asic-specific setup provided in
 * rdev->rlc: the register save/restore list, the clear state buffer,
 * and the CP page table (CIK-era asics only, indicated by a non-zero
 * cp_table_size).  On any failure, everything created so far is torn
 * down via sumo_rlc_fini().  Returns 0 on success, negative errno on
 * failure.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK asics need extra dwords appended to the save/restore
		 * buffer — NOTE(review): exact layout defined in cik.c */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain little-endian copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 * i.e. two dword-offset register indices are packed
			 * per header dword, each followed by a save slot.
			 */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block: size computation is per asic generation */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI prepends a 256-byte header (address + size) */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* EG/NI: count registers plus 3 header dwords per
			 * extent and 2 trailing dwords */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: GPU address of the CSB body and its size,
			 * body starts 256 bytes in */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* EG/NI: header block of 3 dwords per extent
			 * (data address, register offset, flags|byte count)
			 * followed by the packed register data block */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* 0x08000000 flag — NOTE(review): meaning
					 * defined by RLC ucode, confirm before reuse */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* cp table block (CIK-era asics) */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4364
4365static void evergreen_rlc_start(struct radeon_device *rdev)
4366{
4367        u32 mask = RLC_ENABLE;
4368
4369        if (rdev->flags & RADEON_IS_IGP) {
4370                mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4371        }
4372
4373        WREG32(RLC_CNTL, mask);
4374}
4375
4376int evergreen_rlc_resume(struct radeon_device *rdev)
4377{
4378        u32 i;
4379        const __be32 *fw_data;
4380
4381        if (!rdev->rlc_fw)
4382                return -EINVAL;
4383
4384        r600_rlc_stop(rdev);
4385
4386        WREG32(RLC_HB_CNTL, 0);
4387
4388        if (rdev->flags & RADEON_IS_IGP) {
4389                if (rdev->family == CHIP_ARUBA) {
4390                        u32 always_on_bitmap =
4391                                3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4392                        /* find out the number of active simds */
4393                        u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4394                        tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4395                        tmp = hweight32(~tmp);
4396                        if (tmp == rdev->config.cayman.max_simds_per_se) {
4397                                WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4398                                WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4399                                WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4400                                WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4401                                WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4402                        }
4403                } else {
4404                        WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4405                        WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4406                }
4407                WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4408                WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4409        } else {
4410                WREG32(RLC_HB_BASE, 0);
4411                WREG32(RLC_HB_RPTR, 0);
4412                WREG32(RLC_HB_WPTR, 0);
4413                WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4414                WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4415        }
4416        WREG32(RLC_MC_CNTL, 0);
4417        WREG32(RLC_UCODE_CNTL, 0);
4418
4419        fw_data = (const __be32 *)rdev->rlc_fw->data;
4420        if (rdev->family >= CHIP_ARUBA) {
4421                for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4422                        WREG32(RLC_UCODE_ADDR, i);
4423                        WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4424                }
4425        } else if (rdev->family >= CHIP_CAYMAN) {
4426                for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4427                        WREG32(RLC_UCODE_ADDR, i);
4428                        WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4429                }
4430        } else {
4431                for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4432                        WREG32(RLC_UCODE_ADDR, i);
4433                        WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4434                }
4435        }
4436        WREG32(RLC_UCODE_ADDR, 0);
4437
4438        evergreen_rlc_start(rdev);
4439
4440        return 0;
4441}
4442
4443/* Interrupts */
4444
4445u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4446{
4447        if (crtc >= rdev->num_crtc)
4448                return 0;
4449        else
4450                return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4451}
4452
/* Force every interrupt source the driver programs into the disabled state. */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
        int i;
        u32 tmp;

        /* Mask CP interrupts.  Cayman and newer have per-ring CP interrupt
         * controls plus a second DMA engine that must be masked as well.
         */
        if (rdev->family >= CHIP_CAYMAN) {
                cayman_cp_int_cntl_setup(rdev, 0,
                                         CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
                cayman_cp_int_cntl_setup(rdev, 1, 0);
                cayman_cp_int_cntl_setup(rdev, 2, 0);
                tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
                WREG32(CAYMAN_DMA1_CNTL, tmp);
        } else
                WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
        /* Mask the DMA trap, GRBM and SRBM interrupts */
        tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
        WREG32(DMA_CNTL, tmp);
        WREG32(GRBM_INT_CNTL, 0);
        WREG32(SRBM_INT_CNTL, 0);
        /* Mask vblank/vline and page-flip interrupts on every CRTC */
        for (i = 0; i < rdev->num_crtc; i++)
                WREG32(INT_MASK + crtc_offsets[i], 0);
        for (i = 0; i < rdev->num_crtc; i++)
                WREG32(GRPH_INT_CONTROL + crtc_offsets[i], 0);

        /* only one DAC on DCE5 */
        if (!ASIC_IS_DCE5(rdev))
                WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
        WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

        /* Mask all six HPD pins; WREG32_AND preserves only the polarity bit */
        for (i = 0; i < 6; i++)
                WREG32_AND(DC_HPDx_INT_CONTROL(i), DC_HPDx_INT_POLARITY);
}
4484
4485/* Note that the order we write back regs here is important */
/*
 * evergreen_irq_set - program the interrupt enables from the driver state
 * @rdev: radeon_device pointer
 *
 * Builds the enable masks from rdev->irq (ring, vblank/pflip, hpd, afmt,
 * thermal) and writes them to the hardware.  If the IH is disabled, all
 * sources are masked instead.  The register write order below is
 * significant (see the comment preceding this function).
 * Returns 0 on success, -EINVAL if no IRQ handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
        int i;
        u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
        u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
        u32 grbm_int_cntl = 0;
        u32 dma_cntl, dma_cntl1 = 0;
        u32 thermal_int = 0;

        if (!rdev->irq.installed) {
                WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
                return -EINVAL;
        }
        /* don't enable anything if the ih is disabled */
        if (!rdev->ih.enabled) {
                r600_disable_interrupts(rdev);
                /* force the active interrupt state to all disabled */
                evergreen_disable_interrupt_state(rdev);
                return 0;
        }

        /* Start from the current thermal control with the mask bits cleared;
         * Aruba uses a different thermal interrupt register.
         */
        if (rdev->family == CHIP_ARUBA)
                thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
                        ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
        else
                thermal_int = RREG32(CG_THERMAL_INT) &
                        ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

        dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

        if (rdev->family >= CHIP_CAYMAN) {
                /* enable CP interrupts on all rings */
                if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
                        cp_int_cntl |= TIME_STAMP_INT_ENABLE;
                }
                if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
                        cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
                }
                if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
                        cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
                }
        } else {
                if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
                        cp_int_cntl |= RB_INT_ENABLE;
                        cp_int_cntl |= TIME_STAMP_INT_ENABLE;
                }
        }

        if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
                DRM_DEBUG("r600_irq_set: sw int dma\n");
                dma_cntl |= TRAP_ENABLE;
        }

        /* second DMA engine only exists on cayman+ */
        if (rdev->family >= CHIP_CAYMAN) {
                dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
                if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
                        DRM_DEBUG("r600_irq_set: sw int dma1\n");
                        dma_cntl1 |= TRAP_ENABLE;
                }
        }

        if (rdev->irq.dpm_thermal) {
                DRM_DEBUG("dpm thermal\n");
                thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
        }

        /* now commit the accumulated masks to the hardware */
        if (rdev->family >= CHIP_CAYMAN) {
                cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
                cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
                cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
        } else
                WREG32(CP_INT_CNTL, cp_int_cntl);

        WREG32(DMA_CNTL, dma_cntl);

        if (rdev->family >= CHIP_CAYMAN)
                WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

        WREG32(GRBM_INT_CNTL, grbm_int_cntl);

        /* vblank enable tracks both the vblank_int flag and pending pflips */
        for (i = 0; i < rdev->num_crtc; i++) {
                radeon_irq_kms_set_irq_n_enabled(
                    rdev, INT_MASK + crtc_offsets[i],
                    VBLANK_INT_MASK,
                    rdev->irq.crtc_vblank_int[i] ||
                    atomic_read(&rdev->irq.pflip[i]), "vblank", i);
        }

        for (i = 0; i < rdev->num_crtc; i++)
                WREG32(GRPH_INT_CONTROL + crtc_offsets[i], GRPH_PFLIP_INT_MASK);

        for (i = 0; i < 6; i++) {
                radeon_irq_kms_set_irq_n_enabled(
                    rdev, DC_HPDx_INT_CONTROL(i),
                    DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN,
                    rdev->irq.hpd[i], "HPD", i);
        }

        if (rdev->family == CHIP_ARUBA)
                WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
        else
                WREG32(CG_THERMAL_INT, thermal_int);

        for (i = 0; i < 6; i++) {
                radeon_irq_kms_set_irq_n_enabled(
                    rdev, AFMT_AUDIO_PACKET_CONTROL + crtc_offsets[i],
                    AFMT_AZ_FORMAT_WTRIG_MASK,
                    rdev->irq.afmt[i], "HDMI", i);
        }

        /* posting read */
        RREG32(SRBM_STATUS);

        return 0;
}
4605
4606/* Note that the order we write back regs here is important */
/* Latch the display interrupt status registers into rdev->irq.stat_regs and
 * acknowledge every asserted source in hardware.  The write-back order is
 * significant (see the comment preceding this function).
 */
static void evergreen_irq_ack(struct radeon_device *rdev)
{
        int i, j;
        u32 *grph_int = rdev->irq.stat_regs.evergreen.grph_int;
        u32 *disp_int = rdev->irq.stat_regs.evergreen.disp_int;
        u32 *afmt_status = rdev->irq.stat_regs.evergreen.afmt_status;

        /* snapshot all six status registers; grph only for existing CRTCs */
        for (i = 0; i < 6; i++) {
                disp_int[i] = RREG32(evergreen_disp_int_status[i]);
                afmt_status[i] = RREG32(AFMT_STATUS + crtc_offsets[i]);
                if (i < rdev->num_crtc)
                        grph_int[i] = RREG32(GRPH_INT_STATUS + crtc_offsets[i]);
        }

        /* We write back each interrupt register in pairs of two */
        for (i = 0; i < rdev->num_crtc; i += 2) {
                for (j = i; j < (i + 2); j++) {
                        if (grph_int[j] & GRPH_PFLIP_INT_OCCURRED)
                                WREG32(GRPH_INT_STATUS + crtc_offsets[j],
                                       GRPH_PFLIP_INT_CLEAR);
                }

                for (j = i; j < (i + 2); j++) {
                        if (disp_int[j] & LB_D1_VBLANK_INTERRUPT)
                                WREG32(VBLANK_STATUS + crtc_offsets[j],
                                       VBLANK_ACK);
                        if (disp_int[j] & LB_D1_VLINE_INTERRUPT)
                                WREG32(VLINE_STATUS + crtc_offsets[j],
                                       VLINE_ACK);
                }
        }

        /* ack HPD connect/disconnect interrupts */
        for (i = 0; i < 6; i++) {
                if (disp_int[i] & DC_HPD1_INTERRUPT)
                        WREG32_OR(DC_HPDx_INT_CONTROL(i), DC_HPDx_INT_ACK);
        }

        /* ack HPD RX (DP short pulse) interrupts */
        for (i = 0; i < 6; i++) {
                if (disp_int[i] & DC_HPD1_RX_INTERRUPT)
                        WREG32_OR(DC_HPDx_INT_CONTROL(i), DC_HPDx_RX_INT_ACK);
        }

        /* ack HDMI audio format-change triggers */
        for (i = 0; i < 6; i++) {
                if (afmt_status[i] & AFMT_AZ_FORMAT_WTRIG)
                        WREG32_OR(AFMT_AUDIO_PACKET_CONTROL + crtc_offsets[i],
                                  AFMT_AZ_FORMAT_WTRIG_ACK);
        }
}
4655
/* Disable the IH, then ack anything still pending and mask all sources. */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
        r600_disable_interrupts(rdev);
        /* Wait and acknowledge irq */
        mdelay(1);
        evergreen_irq_ack(rdev);
        evergreen_disable_interrupt_state(rdev);
}
4664
/* Quiesce interrupts and stop the RLC for suspend. */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
        evergreen_irq_disable(rdev);
        r600_rlc_stop(rdev);
}
4670
/* Fetch the current IH ring write pointer, handling ring overflow.
 * Returns the wptr masked to the ring size.
 */
static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
{
        u32 wptr, tmp;

        /* prefer the writeback copy; fall back to MMIO when wb is disabled */
        if (rdev->wb.enabled)
                wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
        else
                wptr = RREG32(IH_RB_WPTR);

        if (wptr & RB_OVERFLOW) {
                wptr &= ~RB_OVERFLOW;
                /* When a ring buffer overflow happen start parsing interrupt
                 * from the last not overwritten vector (wptr + 16). Hopefully
                 * this should allow us to catchup.
                 */
                dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
                         wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
                rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
                tmp = RREG32(IH_RB_CNTL);
                tmp |= IH_WPTR_OVERFLOW_CLEAR;
                WREG32(IH_RB_CNTL, tmp);
        }
        return (wptr & rdev->ih.ptr_mask);
}
4695
4696int evergreen_irq_process(struct radeon_device *rdev)
4697{
4698        u32 *disp_int = rdev->irq.stat_regs.evergreen.disp_int;
4699        u32 *afmt_status = rdev->irq.stat_regs.evergreen.afmt_status;
4700        u32 crtc_idx, hpd_idx, afmt_idx;
4701        u32 mask;
4702        u32 wptr;
4703        u32 rptr;
4704        u32 src_id, src_data;
4705        u32 ring_index;
4706        bool queue_hotplug = false;
4707        bool queue_hdmi = false;
4708        bool queue_dp = false;
4709        bool queue_thermal = false;
4710        u32 status, addr;
4711        const char *event_name;
4712
4713        if (!rdev->ih.enabled || rdev->shutdown)
4714                return IRQ_NONE;
4715
4716        wptr = evergreen_get_ih_wptr(rdev);
4717
4718restart_ih:
4719        /* is somebody else already processing irqs? */
4720        if (atomic_xchg(&rdev->ih.lock, 1))
4721                return IRQ_NONE;
4722
4723        rptr = rdev->ih.rptr;
4724        DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4725
4726        /* Order reading of wptr vs. reading of IH ring data */
4727        rmb();
4728
4729        /* display interrupts */
4730        evergreen_irq_ack(rdev);
4731
4732        while (rptr != wptr) {
4733                /* wptr/rptr are in bytes! */
4734                ring_index = rptr / 4;
4735                src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4736                src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4737
4738                switch (src_id) {
4739                case 1: /* D1 vblank/vline */
4740                case 2: /* D2 vblank/vline */
4741                case 3: /* D3 vblank/vline */
4742                case 4: /* D4 vblank/vline */
4743                case 5: /* D5 vblank/vline */
4744                case 6: /* D6 vblank/vline */
4745                        crtc_idx = src_id - 1;
4746
4747                        if (src_data == 0) { /* vblank */
4748                                mask = LB_D1_VBLANK_INTERRUPT;
4749                                event_name = "vblank";
4750
4751                                if (rdev->irq.crtc_vblank_int[crtc_idx]) {
4752                                        drm_handle_vblank(rdev->ddev, crtc_idx);
4753                                        rdev->pm.vblank_sync = true;
4754                                        wake_up(&rdev->irq.vblank_queue);
4755                                }
4756                                if (atomic_read(&rdev->irq.pflip[crtc_idx])) {
4757                                        radeon_crtc_handle_vblank(rdev,
4758                                                                  crtc_idx);
4759                                }
4760
4761                        } else if (src_data == 1) { /* vline */
4762                                mask = LB_D1_VLINE_INTERRUPT;
4763                                event_name = "vline";
4764                        } else {
4765                                DRM_DEBUG("Unhandled interrupt: %d %d\n",
4766                                          src_id, src_data);
4767                                break;
4768                        }
4769
4770                        if (!(disp_int[crtc_idx] & mask)) {
4771                                DRM_DEBUG("IH: D%d %s - IH event w/o asserted irq bit?\n",
4772                                          crtc_idx + 1, event_name);
4773                        }
4774
4775                        disp_int[crtc_idx] &= ~mask;
4776                        DRM_DEBUG("IH: D%d %s\n", crtc_idx + 1, event_name);
4777
4778                        break;
4779                case 8: /* D1 page flip */
4780                case 10: /* D2 page flip */
4781                case 12: /* D3 page flip */
4782                case 14: /* D4 page flip */
4783                case 16: /* D5 page flip */
4784                case 18: /* D6 page flip */
4785                        DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
4786                        if (radeon_use_pflipirq > 0)
4787                                radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
4788                        break;
4789                case 42: /* HPD hotplug */
4790                        if (src_data <= 5) {
4791                                hpd_idx = src_data;
4792                                mask = DC_HPD1_INTERRUPT;
4793                                queue_hotplug = true;
4794                                event_name = "HPD";
4795
4796                        } else if (src_data <= 11) {
4797                                hpd_idx = src_data - 6;
4798                                mask = DC_HPD1_RX_INTERRUPT;
4799                                queue_dp = true;
4800                                event_name = "HPD_RX";
4801
4802                        } else {
4803                                DRM_DEBUG("Unhandled interrupt: %d %d\n",
4804                                          src_id, src_data);
4805                                break;
4806                        }
4807
4808                        if (!(disp_int[hpd_idx] & mask))
4809                                DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
4810
4811                        disp_int[hpd_idx] &= ~mask;
4812                        DRM_DEBUG("IH: %s%d\n", event_name, hpd_idx + 1);
4813
4814                        break;
4815                case 44: /* hdmi */
4816                        afmt_idx = src_data;
4817                        if (!(afmt_status[afmt_idx] & AFMT_AZ_FORMAT_WTRIG))
4818                                DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
4819
4820                        if (afmt_idx > 5) {
4821                                DRM_ERROR("Unhandled interrupt: %d %d\n",
4822                                          src_id, src_data);
4823                                break;
4824                        }
4825                        afmt_status[afmt_idx] &= ~AFMT_AZ_FORMAT_WTRIG;
4826                        queue_hdmi = true;
4827                        DRM_DEBUG("IH: HDMI%d\n", afmt_idx + 1);
4828                        break;
4829                case 96:
4830                        DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
4831                        WREG32(SRBM_INT_ACK, 0x1);
4832                        break;
4833                case 124: /* UVD */
4834                        DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4835                        radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
4836                        break;
4837                case 146:
4838                case 147:
4839                        addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
4840                        status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
4841                        /* reset addr and status */
4842                        WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4843                        if (addr == 0x0 && status == 0x0)
4844                                break;
4845                        dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4846                        dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
4847                                addr);
4848                        dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4849                                status);
4850                        cayman_vm_decode_fault(rdev, status, addr);
4851                        break;
4852                case 176: /* CP_INT in ring buffer */
4853                case 177: /* CP_INT in IB1 */
4854                case 178: /* CP_INT in IB2 */
4855                        DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
4856                        radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4857                        break;
4858                case 181: /* CP EOP event */
4859                        DRM_DEBUG("IH: CP EOP\n");
4860                        if (rdev->family >= CHIP_CAYMAN) {
4861                                switch (src_data) {
4862                                case 0:
4863                                        radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4864                                        break;
4865                                case 1:
4866                                        radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4867                                        break;
4868                                case 2:
4869                                        radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4870                                        break;
4871                                }
4872                        } else
4873                                radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4874                        break;
4875                case 224: /* DMA trap event */
4876                        DRM_DEBUG("IH: DMA trap\n");
4877                        radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4878                        break;
4879                case 230: /* thermal low to high */
4880                        DRM_DEBUG("IH: thermal low to high\n");
4881                        rdev->pm.dpm.thermal.high_to_low = false;
4882                        queue_thermal = true;
4883                        break;
4884                case 231: /* thermal high to low */
4885                        DRM_DEBUG("IH: thermal high to low\n");
4886                        rdev->pm.dpm.thermal.high_to_low = true;
4887                        queue_thermal = true;
4888                        break;
4889                case 233: /* GUI IDLE */
4890                        DRM_DEBUG("IH: GUI idle\n");
4891                        break;
4892                case 244: /* DMA trap event */
4893                        if (rdev->family >= CHIP_CAYMAN) {
4894                                DRM_DEBUG("IH: DMA1 trap\n");
4895                                radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4896                        }
4897                        break;
4898                default:
4899                        DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4900                        break;
4901                }
4902
4903                /* wptr/rptr are in bytes! */
4904                rptr += 16;
4905                rptr &= rdev->ih.ptr_mask;
4906                WREG32(IH_RB_RPTR, rptr);
4907        }
4908        if (queue_dp)
4909                schedule_work(&rdev->dp_work);
4910        if (queue_hotplug)
4911                schedule_delayed_work(&rdev->hotplug_work, 0);
4912        if (queue_hdmi)
4913                schedule_work(&rdev->audio_work);
4914        if (queue_thermal && rdev->pm.dpm_enabled)
4915                schedule_work(&rdev->pm.dpm.thermal.work);
4916        rdev->ih.rptr = rptr;
4917        atomic_set(&rdev->ih.lock, 0);
4918
4919        /* make sure wptr hasn't changed while processing */
4920        wptr = evergreen_get_ih_wptr(rdev);
4921        if (wptr != rptr)
4922                goto restart_ih;
4923
4924        return IRQ_HANDLED;
4925}
4926
/* One-time UVD setup; on failure UVD is disabled rather than propagating
 * an error, since the GPU is usable without it.
 */
static void evergreen_uvd_init(struct radeon_device *rdev)
{
        int r;

        if (!rdev->has_uvd)
                return;

        r = radeon_uvd_init(rdev);
        if (r) {
                dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
                /*
                 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
                 * to early fails uvd_v2_2_resume() and thus nothing happens
                 * there. So it is pointless to try to go through that code
                 * hence why we disable uvd here.
                 */
                rdev->has_uvd = 0;
                return;
        }
        rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
        r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
}
4949
/* Resume the UVD block and start its fence ring; on failure the UVD ring
 * size is zeroed so evergreen_uvd_resume() will skip ring bring-up.
 */
static void evergreen_uvd_start(struct radeon_device *rdev)
{
        int r;

        if (!rdev->has_uvd)
                return;

        r = uvd_v2_2_resume(rdev);
        if (r) {
                dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
                goto error;
        }
        r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
        if (r) {
                dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
                goto error;
        }
        return;

error:
        /* mark the UVD ring unusable */
        rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
}
4972
/* Bring up the UVD ring; a zero ring_size (set by evergreen_uvd_start() on
 * error) means UVD was not started, so nothing to do.  Failures are logged
 * but not propagated.
 */
static void evergreen_uvd_resume(struct radeon_device *rdev)
{
        struct radeon_ring *ring;
        int r;

        if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
                return;

        ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
        r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
        if (r) {
                dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
                return;
        }
        r = uvd_v1_0_init(rdev);
        if (r) {
                dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
                return;
        }
}
4993
/*
 * evergreen_startup - bring the GPU to a working state
 * @rdev: radeon_device pointer
 *
 * Runs the full hardware bring-up sequence: PCIe/ASPM setup, MC
 * programming, GART/AGP, GPU init, RLC and writeback buffers, fence
 * rings, IRQs, CP/DMA rings, UVD, IB pool and audio.  The ordering of
 * these stages is significant.  Returns 0 on success or a negative
 * error code from the first failing stage.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
        struct radeon_ring *ring;
        int r;

        /* enable pcie gen2 link */
        evergreen_pcie_gen2_enable(rdev);
        /* enable aspm */
        evergreen_program_aspm(rdev);

        /* scratch needs to be initialized before MC */
        r = r600_vram_scratch_init(rdev);
        if (r)
                return r;

        evergreen_mc_program(rdev);

        /* MC ucode is only needed on DCE5 when dpm isn't managing it */
        if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
                r = ni_mc_load_microcode(rdev);
                if (r) {
                        DRM_ERROR("Failed to load MC firmware!\n");
                        return r;
                }
        }

        if (rdev->flags & RADEON_IS_AGP) {
                evergreen_agp_enable(rdev);
        } else {
                r = evergreen_pcie_gart_enable(rdev);
                if (r)
                        return r;
        }
        evergreen_gpu_init(rdev);

        /* allocate rlc buffers */
        if (rdev->flags & RADEON_IS_IGP) {
                rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
                rdev->rlc.reg_list_size =
                        (u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
                rdev->rlc.cs_data = evergreen_cs_data;
                r = sumo_rlc_init(rdev);
                if (r) {
                        DRM_ERROR("Failed to init rlc BOs!\n");
                        return r;
                }
        }

        /* allocate wb buffer */
        r = radeon_wb_init(rdev);
        if (r)
                return r;

        r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
        if (r) {
                dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
                return r;
        }

        r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
        if (r) {
                dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
                return r;
        }

        evergreen_uvd_start(rdev);

        /* Enable IRQ */
        if (!rdev->irq.installed) {
                r = radeon_irq_kms_init(rdev);
                if (r)
                        return r;
        }

        r = r600_irq_init(rdev);
        if (r) {
                DRM_ERROR("radeon: IH init failed (%d).\n", r);
                radeon_irq_kms_fini(rdev);
                return r;
        }
        evergreen_irq_set(rdev);

        /* bring up the gfx and DMA rings */
        ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
        r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
                             RADEON_CP_PACKET2);
        if (r)
                return r;

        ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
        r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
                             DMA_PACKET(DMA_PACKET_NOP, 0, 0));
        if (r)
                return r;

        r = evergreen_cp_load_microcode(rdev);
        if (r)
                return r;
        r = evergreen_cp_resume(rdev);
        if (r)
                return r;
        r = r600_dma_resume(rdev);
        if (r)
                return r;

        evergreen_uvd_resume(rdev);

        r = radeon_ib_pool_init(rdev);
        if (r) {
                dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
                return r;
        }

        r = radeon_audio_init(rdev);
        if (r) {
                DRM_ERROR("radeon: audio init failed\n");
                return r;
        }

        return 0;
}
5113
/*
 * evergreen_resume - resume the asic from suspend
 * @rdev: radeon_device pointer
 *
 * Resets and re-posts the GPU, restores golden registers, resumes power
 * management and re-runs the full startup sequence.  Returns 0 on
 * success, negative error code on failure (accel is left disabled).
 */
int evergreen_resume(struct radeon_device *rdev)
{
        int r;

        /* reset the asic, the gfx blocks are often in a bad state
         * after the driver is unloaded or after a resume
         */
        if (radeon_asic_reset(rdev))
                dev_warn(rdev->dev, "GPU reset failed !\n");
        /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
         * posting will perform necessary task to bring back GPU into good
         * shape.
         */
        /* post card */
        atom_asic_init(rdev->mode_info.atom_context);

        /* init golden registers */
        evergreen_init_golden_registers(rdev);

        if (rdev->pm.pm_method == PM_METHOD_DPM)
                radeon_pm_resume(rdev);

        rdev->accel_working = true;
        r = evergreen_startup(rdev);
        if (r) {
                DRM_ERROR("evergreen startup failed on resume\n");
                rdev->accel_working = false;
                return r;
        }

        return r;

}
5147
/*
 * evergreen_suspend - quiesce the asic for suspend
 * @rdev: radeon_device pointer
 *
 * Stops PM, audio, UVD, the CP and DMA engines, then disables IRQs,
 * writeback and the GART.  The teardown order mirrors startup in
 * reverse.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
        radeon_pm_suspend(rdev);
        radeon_audio_fini(rdev);
        if (rdev->has_uvd) {
                uvd_v1_0_fini(rdev);
                radeon_uvd_suspend(rdev);
        }
        r700_cp_stop(rdev);
        r600_dma_stop(rdev);
        evergreen_irq_suspend(rdev);
        radeon_wb_disable(rdev);
        evergreen_pcie_gart_disable(rdev);

        return 0;
}
5164
/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than call asic-specific functions. This should
 * also allow the removal of a bunch of callback functions, such
 * as vram_info.
 */
/**
 * evergreen_init - asic-specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * One-time initialization of the evergreen asic: fetch and validate the
 * (ATOM) BIOS, reset and post the card, set up clocks, fences, AGP, the
 * memory controller and the buffer manager, load microcode, initialize
 * the rings/IH/GART and finally start the engines.  A startup failure
 * only disables acceleration; a missing MC ucode on DCE5 dGPUs is fatal.
 * Returns 0 on success, a negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; an AGP init failure is non-fatal, fall back to PCI(E) */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* DCE5 (NI) parts additionally need MC microcode; only fetch
	 * firmware that is not already cached from a previous init.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	evergreen_uvd_init(rdev);

	/* interrupt handler ring */
	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failure is not fatal to modesetting; tear down
		 * the acceleration state and continue without accel
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5294
/**
 * evergreen_fini - asic-specific driver and hw teardown
 *
 * @rdev: radeon_device pointer
 *
 * Full teardown on driver unload, mirroring evergreen_init() in
 * reverse dependency order: engines and IRQs first, then UVD, GART,
 * memory/fence infrastructure, AGP, atombios state and the cached
 * BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5319
5320void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5321{
5322        u32 link_width_cntl, speed_cntl;
5323
5324        if (radeon_pcie_gen2 == 0)
5325                return;
5326
5327        if (rdev->flags & RADEON_IS_IGP)
5328                return;
5329
5330        if (!(rdev->flags & RADEON_IS_PCIE))
5331                return;
5332
5333        /* x2 cards have a special sequence */
5334        if (ASIC_IS_X2(rdev))
5335                return;
5336
5337        if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5338                (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5339                return;
5340
5341        speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5342        if (speed_cntl & LC_CURRENT_DATA_RATE) {
5343                DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5344                return;
5345        }
5346
5347        DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5348
5349        if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5350            (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5351
5352                link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5353                link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5354                WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5355
5356                speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5357                speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5358                WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5359
5360                speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5361                speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5362                WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5363
5364                speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5365                speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5366                WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5367
5368                speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5369                speed_cntl |= LC_GEN2_EN_STRAP;
5370                WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5371
5372        } else {
5373                link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5374                /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5375                if (1)
5376                        link_width_cntl |= LC_UPCONFIGURE_DIS;
5377                else
5378                        link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5379                WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5380        }
5381}
5382
/**
 * evergreen_program_aspm - program PCIE ASPM (Active State Power Management)
 *
 * @rdev: radeon_device pointer
 *
 * Configures the L0s/L1 link power states and the PLL/PIF power-down
 * behaviour used while the link is in L1.  Skipped entirely when ASPM
 * is disabled via the radeon.aspm module parameter or the device is
 * not on a PCIE bus.  L0s is kept disabled on families where it is
 * known to be problematic.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* families where L0s should stay disabled */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing differs between fusion and discrete platforms;
	 * only write the registers back if the value actually changed
	 */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* program the L0s/L1 inactivity timers; BARTS+ (NI) parts use
	 * different timer values than the original evergreen parts
	 */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			/* power the PLLs down while the link sits in L1/TXS2 */
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			if (rdev->family >= CHIP_BARTS) {
				/* BARTS+ also need the PLL ramp-up time tuned */
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			/* dynamic lane power state */
			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				/* shorten LS2 exit time on BARTS+ */
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	/* commit the LC_CNTL changes only if something changed */
	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
5532