uboot/arch/arm/mach-sunxi/dram_sun4i.c
// SPDX-License-Identifier: GPL-2.0+
/*
 * sunxi DRAM controller initialization
 * (C) Copyright 2012 Henrik Nordstrom <henrik@henriknordstrom.net>
 * (C) Copyright 2013 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
 *
 * Based on sun4i Linux kernel sources mach-sunxi/pm/standby/dram*.c
 * and earlier U-Boot Allwinner A10 SPL work
 *
 * (C) Copyright 2007-2012
 * Allwinner Technology Co., Ltd. <www.allwinnertech.com>
 * Berg Xing <bergxing@allwinnertech.com>
 * Tom Cubie <tangliang@allwinnertech.com>
 */

/*
 * Unfortunately the only documentation we have on the sun7i DRAM
 * controller is Allwinner boot0 + boot1 code, and that code uses
 * magic numbers & shifts with no explanations. Hence this code is
 * rather undocumented and full of magic.
 */

#include <common.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/dram.h>
#include <asm/arch/timer.h>
#include <asm/arch/sys_proto.h>

#define CPU_CFG_CHIP_VER(n) ((n) << 6)
#define CPU_CFG_CHIP_VER_MASK CPU_CFG_CHIP_VER(0x3)
#define CPU_CFG_CHIP_REV_A 0x0
#define CPU_CFG_CHIP_REV_C1 0x1
#define CPU_CFG_CHIP_REV_C2 0x2
#define CPU_CFG_CHIP_REV_B 0x3

/*
 * Wait up to 1s for mask to be clear in given reg.
 */
static inline void await_bits_clear(u32 *reg, u32 mask)
{
        mctl_await_completion(reg, mask, 0);
}

/*
 * Wait up to 1s for mask to be set in given reg.
 */
static inline void await_bits_set(u32 *reg, u32 mask)
{
        mctl_await_completion(reg, mask, mask);
}

/*
 * This performs the external DRAM reset by driving the RESET pin low and
 * then high again. According to the DDR3 spec, the RESET pin needs to be
 * kept low for at least 200 us.
 */
static void mctl_ddr3_reset(void)
{
        struct sunxi_dram_reg *dram =
                        (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

#ifdef CONFIG_MACH_SUN4I
        struct sunxi_timer_reg *timer =
                        (struct sunxi_timer_reg *)SUNXI_TIMER_BASE;
        u32 reg_val;

        writel(0, &timer->cpu_cfg);
        reg_val = readl(&timer->cpu_cfg);

        if ((reg_val & CPU_CFG_CHIP_VER_MASK) !=
            CPU_CFG_CHIP_VER(CPU_CFG_CHIP_REV_A)) {
                setbits_le32(&dram->mcr, DRAM_MCR_RESET);
                udelay(200);
                clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
        } else
#endif
        {
                clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
                udelay(200);
                setbits_le32(&dram->mcr, DRAM_MCR_RESET);
        }
        /* After the RESET pin is de-asserted, the DDR3 spec requires waiting
         * an additional 500 us before driving the CKE pin (Clock Enable)
         * high. The duration of this delay can be configured in the SDR_IDCR
         * (Initialization Delay Configuration Register) and applied
         * automatically by the DRAM controller during the DDR3 initialization
         * step. But SDR_IDCR has limited range on sun4i/sun5i hardware and
         * can't provide sufficient delay at DRAM clock frequencies higher than
         * 524 MHz (while Allwinner A13 supports DRAM clock frequency up to
         * 533 MHz according to the datasheet). Additionally, there is no
         * official documentation for the SDR_IDCR register anywhere, and
         * there is always a chance that we are interpreting it wrong.
         * Better be safe than sorry, so add an explicit delay here. */
        udelay(500);
}

static void mctl_set_drive(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

#ifdef CONFIG_MACH_SUN7I
        clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3) | (0x3 << 28),
#else
        clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3),
#endif
                        DRAM_MCR_MODE_EN(0x3) |
                        0xffc);
}

static void mctl_itm_disable(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

        clrsetbits_le32(&dram->ccr, DRAM_CCR_INIT, DRAM_CCR_ITM_OFF);
}

static void mctl_itm_enable(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

        clrbits_le32(&dram->ccr, DRAM_CCR_ITM_OFF);
}

static void mctl_itm_reset(void)
{
        mctl_itm_disable();
        udelay(1); /* ITM reset needs a bit of delay */
        mctl_itm_enable();
        udelay(1);
}

static void mctl_enable_dll0(u32 phase)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

        clrsetbits_le32(&dram->dllcr[0], 0x3f << 6,
                        ((phase >> 16) & 0x3f) << 6);
        clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET, DRAM_DLLCR_DISABLE);
        udelay(2);

        clrbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET | DRAM_DLLCR_DISABLE);
        udelay(22);

        clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_DISABLE, DRAM_DLLCR_NRESET);
        udelay(22);
}

/* Get the number of DDR byte lanes */
static u32 mctl_get_number_of_lanes(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        if ((readl(&dram->dcr) & DRAM_DCR_BUS_WIDTH_MASK) ==
                                DRAM_DCR_BUS_WIDTH(DRAM_DCR_BUS_WIDTH_32BIT))
                return 4;
        else
                return 2;
}

/*
 * Note: This differs from pm/standby in that it checks the bus width
 */
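/*
 * The phase argument (tpr3) apparently packs one 4-bit DLL phase adjustment
 * per byte lane into bits [15:0] (lane 0 in the lowest nibble), while
 * bits [21:16] hold the separate value consumed by mctl_enable_dll0() above.
 * This layout is only inferred from how the value is used here, not from
 * any official documentation.
 */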
static void mctl_enable_dllx(u32 phase)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 i, number_of_lanes;

        number_of_lanes = mctl_get_number_of_lanes();

        for (i = 1; i <= number_of_lanes; i++) {
                clrsetbits_le32(&dram->dllcr[i], 0xf << 14,
                                (phase & 0xf) << 14);
                clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET,
                                DRAM_DLLCR_DISABLE);
                phase >>= 4;
        }
        udelay(2);

        for (i = 1; i <= number_of_lanes; i++)
                clrbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET |
                             DRAM_DLLCR_DISABLE);
        udelay(22);

        for (i = 1; i <= number_of_lanes; i++)
                clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_DISABLE,
                                DRAM_DLLCR_NRESET);
        udelay(22);
}

static u32 hpcr_value[32] = {
#ifdef CONFIG_MACH_SUN5I
        0, 0, 0, 0,
        0, 0, 0, 0,
        0, 0, 0, 0,
        0, 0, 0, 0,
        0x1031, 0x1031, 0x0735, 0x1035,
        0x1035, 0x0731, 0x1031, 0,
        0x0301, 0x0301, 0x0301, 0x0301,
        0x0301, 0x0301, 0x0301, 0
#endif
#ifdef CONFIG_MACH_SUN4I
        0x0301, 0x0301, 0x0301, 0x0301,
        0x0301, 0x0301, 0, 0,
        0, 0, 0, 0,
        0, 0, 0, 0,
        0x1031, 0x1031, 0x0735, 0x5031,
        0x1035, 0x0731, 0x1031, 0x0735,
        0x1035, 0x1031, 0x0731, 0x1035,
        0x1031, 0x0301, 0x0301, 0x0731
#endif
#ifdef CONFIG_MACH_SUN7I
        0x0301, 0x0301, 0x0301, 0x0301,
        0x0301, 0x0301, 0x0301, 0x0301,
        0, 0, 0, 0,
        0, 0, 0, 0,
        0x1031, 0x1031, 0x0735, 0x1035,
        0x1035, 0x0731, 0x1031, 0x0735,
        0x1035, 0x1031, 0x0731, 0x1035,
        0x0001, 0x1031, 0, 0x1031
        /* last row differs from boot0 source table
         * 0x1031, 0x0301, 0x0301, 0x0731
         * but boot0 code skips #28 and #30, and sets #29 and #31 to the
         * value from #28 entry (0x1031)
         */
#endif
};

static void mctl_configure_hostport(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 i;

        for (i = 0; i < 32; i++)
                writel(hpcr_value[i], &dram->hpcr[i]);
}

static void mctl_setup_dram_clock(u32 clk, u32 mbus_clk)
{
        u32 reg_val;
        struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
        u32 pll5p_clk, pll6x_clk;
        u32 pll5p_div, pll6x_div;
        u32 pll5p_rate, pll6x_rate;

        /* setup DRAM PLL */
        reg_val = readl(&ccm->pll5_cfg);
        reg_val &= ~CCM_PLL5_CTRL_M_MASK;               /* set M to 0 (x1) */
        reg_val &= ~CCM_PLL5_CTRL_K_MASK;               /* set K to 0 (x1) */
        reg_val &= ~CCM_PLL5_CTRL_N_MASK;               /* set N to 0 (x0) */
        reg_val &= ~CCM_PLL5_CTRL_P_MASK;               /* set P to 0 (x1) */
#ifdef CONFIG_OLD_SUNXI_KERNEL_COMPAT
        /* Old kernels are hardcoded to P=1 (divide by 2) */
        reg_val |= CCM_PLL5_CTRL_P(1);
#endif
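        /*
         * In every branch below the resulting PLL5 rate appears to work out
         * to 24 MHz * N * K / M. For example, the 540 MHz case uses N=15,
         * K=3, M=2, i.e. 24 * 15 * 3 / 2 = 540 MHz, and the fallback case
         * reduces to 24 * (clk / 24) * 2 / 2 = clk. This formula is only
         * inferred from the magic values, not from official documentation.
         */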
        if (clk >= 540 && clk < 552) {
                /* dram = 540MHz */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(15));
        } else if (clk >= 512 && clk < 528) {
                /* dram = 512MHz */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(4));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(16));
        } else if (clk >= 496 && clk < 504) {
                /* dram = 496MHz */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(31));
        } else if (clk >= 468 && clk < 480) {
                /* dram = 468MHz */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(13));
        } else if (clk >= 396 && clk < 408) {
                /* dram = 396MHz */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(11));
        } else {
                /* any other frequency that is a multiple of 24 */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(clk / 24));
        }
        reg_val &= ~CCM_PLL5_CTRL_VCO_GAIN;             /* PLL VCO Gain off */
        reg_val |= CCM_PLL5_CTRL_EN;                    /* PLL On */
        writel(reg_val, &ccm->pll5_cfg);
        udelay(5500);

        setbits_le32(&ccm->pll5_cfg, CCM_PLL5_CTRL_DDR_CLK);

#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN7I)
        /* reset GPS */
        clrbits_le32(&ccm->gps_clk_cfg, CCM_GPS_CTRL_RESET | CCM_GPS_CTRL_GATE);
        setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
        udelay(1);
        clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
#endif

        /* setup MBUS clock */
        if (!mbus_clk)
                mbus_clk = 300;

        /* PLL5P and PLL6 are the potential clock sources for MBUS */
        pll6x_clk = clock_get_pll6() / 1000000;
#ifdef CONFIG_MACH_SUN7I
        pll6x_clk *= 2; /* sun7i uses PLL6*2, sun5i uses just PLL6 */
#endif
        pll5p_clk = clock_get_pll5p() / 1000000;
        pll6x_div = DIV_ROUND_UP(pll6x_clk, mbus_clk);
        pll5p_div = DIV_ROUND_UP(pll5p_clk, mbus_clk);
        pll6x_rate = pll6x_clk / pll6x_div;
        pll5p_rate = pll5p_clk / pll5p_div;

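        /*
         * Pick whichever source gives the higher achievable rate that does
         * not exceed mbus_clk. As a purely illustrative example: with
         * pll6x_clk = 600 and the default mbus_clk = 300, pll6x_div becomes
         * DIV_ROUND_UP(600, 300) = 2 and pll6x_rate is exactly 300 MHz.
         */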
        if (pll6x_div <= 16 && pll6x_rate > pll5p_rate) {
                /* use PLL6 as the MBUS clock source */
                reg_val = CCM_MBUS_CTRL_GATE |
                          CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL6) |
                          CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
                          CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll6x_div));
        } else if (pll5p_div <= 16) {
                /* use PLL5P as the MBUS clock source */
                reg_val = CCM_MBUS_CTRL_GATE |
                          CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL5) |
                          CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
                          CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll5p_div));
        } else {
                panic("Bad mbus_clk\n");
        }
        writel(reg_val, &ccm->mbus_clk_cfg);

        /*
         * open DRAMC AHB & DLL register clock
         * close it first
         */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
        clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
#else
        clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
#endif
        udelay(22);

        /* then open it */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
        setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
#else
        setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
#endif
        udelay(22);
}

/*
 * The data from rslrX and rdgrX registers (X=rank) is stored
 * in a single 32-bit value using the following format:
 *   bits [31:26] - DQS gating system latency for byte lane 3
 *   bits [25:24] - DQS gating phase select for byte lane 3
 *   bits [23:18] - DQS gating system latency for byte lane 2
 *   bits [17:16] - DQS gating phase select for byte lane 2
 *   bits [15:10] - DQS gating system latency for byte lane 1
 *   bits [ 9:8 ] - DQS gating phase select for byte lane 1
 *   bits [ 7:2 ] - DQS gating system latency for byte lane 0
 *   bits [ 1:0 ] - DQS gating phase select for byte lane 0
 */
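/*
 * For example (purely illustrative), a dqs_gating_delay of 0x05050505 would
 * request system latency 1 and phase select 1 on every byte lane, since each
 * per-lane byte 0x05 splits into latency (0x05 >> 2) = 1 and phase
 * (0x05 & 3) = 1.
 */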
static void mctl_set_dqs_gating_delay(int rank, u32 dqs_gating_delay)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 lane, number_of_lanes = mctl_get_number_of_lanes();
        /* rank0 gating system latency (3 bits per lane: cycles) */
        u32 slr = readl(rank == 0 ? &dram->rslr0 : &dram->rslr1);
        /* rank0 gating phase select (2 bits per lane: 90, 180, 270, 360) */
        u32 dgr = readl(rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
        for (lane = 0; lane < number_of_lanes; lane++) {
                u32 tmp = dqs_gating_delay >> (lane * 8);
                slr &= ~(7 << (lane * 3));
                slr |= ((tmp >> 2) & 7) << (lane * 3);
                dgr &= ~(3 << (lane * 2));
                dgr |= (tmp & 3) << (lane * 2);
        }
        writel(slr, rank == 0 ? &dram->rslr0 : &dram->rslr1);
        writel(dgr, rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
}

static int dramc_scan_readpipe(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 reg_val;

        /* data training trigger */
        clrbits_le32(&dram->csr, DRAM_CSR_FAILED);
        setbits_le32(&dram->ccr, DRAM_CCR_DATA_TRAINING);

        /* check whether data training process has completed */
        await_bits_clear(&dram->ccr, DRAM_CCR_DATA_TRAINING);

        /* check data training result */
        reg_val = readl(&dram->csr);
        if (reg_val & DRAM_CSR_FAILED)
                return -1;

        return 0;
}

static void dramc_clock_output_en(u32 on)
{
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

        if (on)
                setbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
        else
                clrbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
#endif
#ifdef CONFIG_MACH_SUN4I
        struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
        if (on)
                setbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
        else
                clrbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
#endif
}

/* tRFC in nanoseconds for different densities (from the DDR3 spec) */
static const u16 tRFC_DDR3_table[6] = {
        /* 256Mb    512Mb    1Gb      2Gb      4Gb      8Gb */
           90,      90,      110,     160,     300,     350
};

static void dramc_set_autorefresh_cycle(u32 clk, u32 density)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 tRFC, tREFI;

        tRFC = (tRFC_DDR3_table[density] * clk + 999) / 1000;
        tREFI = (7987 * clk) >> 10;     /* <= 7.8us */
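        /*
         * Worked example (illustrative): at clk = 480 (MHz) with 1 Gb chips,
         * tRFC = (110 * 480 + 999) / 1000 = 53 cycles, and since 7987 / 1024
         * is roughly 7.8, tREFI = (7987 * 480) >> 10 = 3743 cycles, i.e. a
         * refresh interval just under 7.8 us.
         */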

        writel(DRAM_DRR_TREFI(tREFI) | DRAM_DRR_TRFC(tRFC), &dram->drr);
}

/* Calculate the value for A11, A10, A9 bits in MR0 (write recovery) */
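/*
 * For example, at clk = 480 (MHz): twr_ck = (15 * 480 + 999) / 1000 = 8,
 * which falls into the 5..8 range below, so the function returns 8 - 4 = 4.
 */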
static u32 ddr3_write_recovery(u32 clk)
{
        u32 twr_ns = 15; /* DDR3 spec says that it is 15ns for all speed bins */
        u32 twr_ck = (twr_ns * clk + 999) / 1000;
        if (twr_ck < 5)
                return 1;
        else if (twr_ck <= 8)
                return twr_ck - 4;
        else if (twr_ck <= 10)
                return 5;
        else
                return 6;
}

/*
 * If the dram->ppwrsctl (SDR_DPCR) register has the lowest bit set to 1, this
 * means that DRAM is currently in self-refresh mode and retaining the old
 * data. Since we have no idea what to do in this situation yet, just set this
 * register to 0 and initialize DRAM in the same way as on any normal reboot
 * (discarding whatever was stored there).
 *
 * Note: on sun7i hardware, the highest 16 bits need to be set to 0x1651 magic
 * value for this write operation to have any effect. On sun5i hardware this
 * magic value is not necessary. And on sun4i hardware the writes to this
 * register seem to have no effect at all.
 */
static void mctl_disable_power_save(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        writel(0x16510000, &dram->ppwrsctl);
}

/*
 * After the DRAM is powered up or reset, the DDR3 spec requires waiting at
 * least 500 us before driving the CKE pin (Clock Enable) high. The dram->idcr
 * (SDR_IDCR) register appears to configure this delay, which gets applied
 * right at the time when the DRAM initialization is activated in the
 * 'mctl_ddr3_initialize' function.
 */
static void mctl_set_cke_delay(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

        /* The CKE delay is represented in DRAM clock cycles, multiplied by N
         * (where N=2 for sun4i/sun5i and N=3 for sun7i). Here it is set to
         * the maximum possible value 0x1ffff, just like in the Allwinner's
         * boot0 bootloader. The resulting delay value is somewhere between
         * ~0.4 ms (sun5i with 648 MHz DRAM clock speed) and ~1.1 ms (sun7i
         * with 360 MHz DRAM clock speed). */
        setbits_le32(&dram->idcr, 0x1ffff);
}

/*
 * This triggers the DRAM initialization. Among other things, it sends the
 * mode register values to the DRAM. Very likely the ZQCL command is also
 * executed (to do the initial impedance calibration on the DRAM side of the
 * wire). The memory controller and the PHY must already be configured before
 * calling this function.
 */
static void mctl_ddr3_initialize(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        setbits_le32(&dram->ccr, DRAM_CCR_INIT);
        await_bits_clear(&dram->ccr, DRAM_CCR_INIT);
}

/*
 * Perform impedance calibration on the DRAM controller side of the wire.
 */
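/*
 * Judging purely from the code below, the zq argument packs two fields:
 * bits [7:0] (zprog) select the divider used when calibrating against the
 * external resistor, and bits [27:8] (zdata), if non-zero, are written out
 * directly as externally supplied calibration data instead.
 */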
static void mctl_set_impedance(u32 zq, bool odt_en)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 reg_val;
        u32 zprog = zq & 0xFF, zdata = (zq >> 8) & 0xFFFFF;

#ifndef CONFIG_MACH_SUN7I
        /* Appears that some kind of automatically initiated default
         * ZQ calibration is already in progress at this point on sun4i/sun5i
         * hardware, but not on sun7i. So it is reasonable to wait for its
         * completion before doing anything else. */
        await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
#endif

        /* ZQ calibration is not really useful unless ODT is enabled */
        if (!odt_en)
                return;

#ifdef CONFIG_MACH_SUN7I
        /* Enabling ODT in SDR_IOCR on sun7i hardware results in a deadlock
         * unless bit 24 is set in SDR_ZQCR1. Not much is known about the
         * SDR_ZQCR1 register, but there are hints indicating that it might
         * be related to periodic impedance re-calibration. This particular
         * magic value is borrowed from the Allwinner boot0 bootloader, and
         * using it helps to avoid trouble. */
        writel((1 << 24) | (1 << 1), &dram->zqcr1);
#endif

        /* Needed at least for sun5i, because it does not self clear there */
        clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

        if (zdata) {
                /* Set the user supplied impedance data */
                reg_val = DRAM_ZQCR0_ZDEN | zdata;
                writel(reg_val, &dram->zqcr0);
                /* no need to wait, this takes effect immediately */
        } else {
                /* Do the calibration using the external resistor */
                reg_val = DRAM_ZQCR0_ZCAL | DRAM_ZQCR0_IMP_DIV(zprog);
                writel(reg_val, &dram->zqcr0);
                /* Wait for the new impedance configuration to settle */
                await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
        }

        /* Needed at least for sun5i, because it does not self clear there */
        clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

        /* Set I/O configure register */
        writel(DRAM_IOCR_ODT_EN, &dram->iocr);
}

static unsigned long dramc_init_helper(struct dram_para *para)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 reg_val;
        u32 density;
        int ret_val;

        /*
         * only single rank DDR3 is supported by this code even though the
         * hardware can theoretically support DDR2 and up to two ranks
         */
        if (para->type != DRAM_MEMORY_TYPE_DDR3 || para->rank_num != 1)
                return 0;

        /* setup DRAM relative clock */
        mctl_setup_dram_clock(para->clock, para->mbus_clock);

        /* Disable any pad power save control */
        mctl_disable_power_save();

        mctl_set_drive();

        /* dram clock off */
        dramc_clock_output_en(0);

#ifdef CONFIG_MACH_SUN4I
        /* select dram controller 1 */
        writel(DRAM_CSEL_MAGIC, &dram->csel);
#endif

        mctl_itm_disable();
        mctl_enable_dll0(para->tpr3);

        /* configure external DRAM */
        reg_val = DRAM_DCR_TYPE_DDR3;
        reg_val |= DRAM_DCR_IO_WIDTH(para->io_width >> 3);

        if (para->density == 256)
                density = DRAM_DCR_CHIP_DENSITY_256M;
        else if (para->density == 512)
                density = DRAM_DCR_CHIP_DENSITY_512M;
        else if (para->density == 1024)
                density = DRAM_DCR_CHIP_DENSITY_1024M;
        else if (para->density == 2048)
                density = DRAM_DCR_CHIP_DENSITY_2048M;
        else if (para->density == 4096)
                density = DRAM_DCR_CHIP_DENSITY_4096M;
        else if (para->density == 8192)
                density = DRAM_DCR_CHIP_DENSITY_8192M;
        else
                density = DRAM_DCR_CHIP_DENSITY_256M;

        reg_val |= DRAM_DCR_CHIP_DENSITY(density);
        reg_val |= DRAM_DCR_BUS_WIDTH((para->bus_width >> 3) - 1);
        reg_val |= DRAM_DCR_RANK_SEL(para->rank_num - 1);
        reg_val |= DRAM_DCR_CMD_RANK_ALL;
        reg_val |= DRAM_DCR_MODE(DRAM_DCR_MODE_INTERLEAVE);
        writel(reg_val, &dram->dcr);

        dramc_clock_output_en(1);

        mctl_set_impedance(para->zq, para->odt_en);

        mctl_set_cke_delay();

        mctl_ddr3_reset();

        udelay(1);

        await_bits_clear(&dram->ccr, DRAM_CCR_INIT);

        mctl_enable_dllx(para->tpr3);

        /* set refresh period */
        dramc_set_autorefresh_cycle(para->clock, density);

        /* set timing parameters */
        writel(para->tpr0, &dram->tpr0);
        writel(para->tpr1, &dram->tpr1);
        writel(para->tpr2, &dram->tpr2);

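        /*
         * Program the DDR3 mode register (MR0). In DDR3 MR0 a burst length
         * field of 0 selects fixed BL8, and CAS latencies 5..11 are encoded
         * as CL - 4, which is why para->cas - 4 is written below.
         */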
        reg_val = DRAM_MR_BURST_LENGTH(0x0);
#if (defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I))
        reg_val |= DRAM_MR_POWER_DOWN;
#endif
        reg_val |= DRAM_MR_CAS_LAT(para->cas - 4);
        reg_val |= DRAM_MR_WRITE_RECOVERY(ddr3_write_recovery(para->clock));
        writel(reg_val, &dram->mr);

        writel(para->emr1, &dram->emr);
        writel(para->emr2, &dram->emr2);
        writel(para->emr3, &dram->emr3);

        /* disable drift compensation and set passive DQS window mode */
        clrsetbits_le32(&dram->ccr, DRAM_CCR_DQS_DRIFT_COMP, DRAM_CCR_DQS_GATE);

#ifdef CONFIG_MACH_SUN7I
        /* Command rate timing mode 2T & 1T */
        if (para->tpr4 & 0x1)
                setbits_le32(&dram->ccr, DRAM_CCR_COMMAND_RATE_1T);
#endif
        /* initialize external DRAM */
        mctl_ddr3_initialize();

        /* scan read pipe value */
        mctl_itm_enable();

        /* Hardware DQS gate training */
        ret_val = dramc_scan_readpipe();

        if (ret_val < 0)
                return 0;

        /* allow overriding the DQS training results with a custom delay */
        if (para->dqs_gating_delay)
                mctl_set_dqs_gating_delay(0, para->dqs_gating_delay);

        /* set the DQS gating window type */
        if (para->active_windowing)
                clrbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);
        else
                setbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);

        mctl_itm_reset();

        /* configure all host ports */
        mctl_configure_hostport();

        return get_ram_size((long *)PHYS_SDRAM_0, PHYS_SDRAM_0_SIZE);
}

unsigned long dramc_init(struct dram_para *para)
{
        unsigned long dram_size, actual_density;

        /* If the dram configuration is not provided, bail out */
        if (!para)
                return 0;

        /* if everything is known, then autodetection is not necessary */
        if (para->io_width && para->bus_width && para->density)
                return dramc_init_helper(para);

        /* try to autodetect the DRAM bus width and density */
        para->io_width  = 16;
        para->bus_width = 32;
#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN5I)
        /* only A0-A14 address lines on A10/A13, limiting max density to 4096 */
        para->density = 4096;
#else
        /* all A0-A15 address lines on A20, which allows densities up to 8192 */
        para->density = 8192;
#endif

        dram_size = dramc_init_helper(para);
        if (!dram_size) {
                /* if 32-bit bus width failed, try 16-bit bus width instead */
                para->bus_width = 16;
                dram_size = dramc_init_helper(para);
                if (!dram_size) {
                        /* if 16-bit bus width also failed, then bail out */
                        return dram_size;
                }
        }

        /* check if we need to adjust the density */
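        /*
         * The density per chip in Mbit is (dram_size * 8 bits / 2^20) scaled
         * by io_width / bus_width (the fraction of the bus one chip drives),
         * which simplifies to the expression below.
         */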
        actual_density = (dram_size >> 17) * para->io_width / para->bus_width;

        if (actual_density != para->density) {
                /* update the density and re-initialize the DRAM */
                para->density = actual_density;
                dram_size = dramc_init_helper(para);
        }

        return dram_size;
}