uboot/arch/arm/mach-sunxi/dram_sun4i.c
/*
 * sunxi DRAM controller initialization
 * (C) Copyright 2012 Henrik Nordstrom <henrik@henriknordstrom.net>
 * (C) Copyright 2013 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
 *
 * Based on sun4i Linux kernel sources mach-sunxi/pm/standby/dram*.c
 * and earlier U-Boot Allwinner A10 SPL work
 *
 * (C) Copyright 2007-2012
 * Allwinner Technology Co., Ltd. <www.allwinnertech.com>
 * Berg Xing <bergxing@allwinnertech.com>
 * Tom Cubie <tangliang@allwinnertech.com>
 *
 * SPDX-License-Identifier:     GPL-2.0+
 */

/*
 * Unfortunately the only documentation we have on the sun7i DRAM
 * controller is Allwinner boot0 + boot1 code, and that code uses
 * magic numbers & shifts with no explanations. Hence this code is
 * rather undocumented and full of magic.
 */

#include <common.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/dram.h>
#include <asm/arch/timer.h>
#include <asm/arch/sys_proto.h>

#define CPU_CFG_CHIP_VER(n) ((n) << 6)
#define CPU_CFG_CHIP_VER_MASK CPU_CFG_CHIP_VER(0x3)
#define CPU_CFG_CHIP_REV_A 0x0
#define CPU_CFG_CHIP_REV_C1 0x1
#define CPU_CFG_CHIP_REV_C2 0x2
#define CPU_CFG_CHIP_REV_B 0x3

/*
 * Wait up to 1s for the mask to be clear in the given reg.
 */
static inline void await_bits_clear(u32 *reg, u32 mask)
{
        mctl_await_completion(reg, mask, 0);
}

/*
 * Wait up to 1s for the mask to be set in the given reg.
 */
static inline void await_bits_set(u32 *reg, u32 mask)
{
        mctl_await_completion(reg, mask, mask);
}

/*
 * This performs the external DRAM reset by driving the RESET pin low and
 * then high again. According to the DDR3 spec, the RESET pin needs to be
 * kept low for at least 200 us.
 */
static void mctl_ddr3_reset(void)
{
        struct sunxi_dram_reg *dram =
                        (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

#ifdef CONFIG_MACH_SUN4I
        struct sunxi_timer_reg *timer =
                        (struct sunxi_timer_reg *)SUNXI_TIMER_BASE;
        u32 reg_val;

        writel(0, &timer->cpu_cfg);
        reg_val = readl(&timer->cpu_cfg);

        if ((reg_val & CPU_CFG_CHIP_VER_MASK) !=
            CPU_CFG_CHIP_VER(CPU_CFG_CHIP_REV_A)) {
                setbits_le32(&dram->mcr, DRAM_MCR_RESET);
                udelay(200);
                clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
        } else
#endif
        {
                clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
                udelay(200);
                setbits_le32(&dram->mcr, DRAM_MCR_RESET);
        }
        /* After the RESET pin is de-asserted, the DDR3 spec requires waiting
         * an additional 500 us before driving the CKE pin (Clock Enable)
         * high. The duration of this delay can be configured in the SDR_IDCR
         * (Initialization Delay Configuration Register) and applied
         * automatically by the DRAM controller during the DDR3 initialization
         * step. But SDR_IDCR has a limited range on sun4i/sun5i hardware and
         * can't provide a sufficient delay at DRAM clock frequencies higher
         * than 524 MHz (while the Allwinner A13 supports DRAM clock
         * frequencies up to 533 MHz according to the datasheet). Additionally,
         * there is no official documentation for the SDR_IDCR register
         * anywhere, and there is always a chance that we are interpreting it
         * wrong. Better be safe than sorry, so add an explicit delay here. */
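        /*
         * Rough sanity check of the 524 MHz figure above (assuming the same
         * 0x1ffff limit and x2 cycle multiplier described in
         * mctl_set_cke_delay() below): 2 * 0x1ffff cycles / 524 MHz is about
         * 500 us, so at higher DRAM clock rates SDR_IDCR alone can no longer
         * cover the full required delay.
         */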
        udelay(500);
}

static void mctl_set_drive(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

#ifdef CONFIG_MACH_SUN7I
        clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3) | (0x3 << 28),
#else
        clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3),
#endif
                        DRAM_MCR_MODE_EN(0x3) |
                        0xffc);
}

static void mctl_itm_disable(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

        clrsetbits_le32(&dram->ccr, DRAM_CCR_INIT, DRAM_CCR_ITM_OFF);
}

static void mctl_itm_enable(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

        clrbits_le32(&dram->ccr, DRAM_CCR_ITM_OFF);
}

static void mctl_itm_reset(void)
{
        mctl_itm_disable();
        udelay(1); /* ITM reset needs a bit of delay */
        mctl_itm_enable();
        udelay(1);
}

static void mctl_enable_dll0(u32 phase)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

        clrsetbits_le32(&dram->dllcr[0], 0x3f << 6,
                        ((phase >> 16) & 0x3f) << 6);
        clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET, DRAM_DLLCR_DISABLE);
        udelay(2);

        clrbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET | DRAM_DLLCR_DISABLE);
        udelay(22);

        clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_DISABLE, DRAM_DLLCR_NRESET);
        udelay(22);
}

/* Get the number of DDR byte lanes */
static u32 mctl_get_number_of_lanes(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        if ((readl(&dram->dcr) & DRAM_DCR_BUS_WIDTH_MASK) ==
                                DRAM_DCR_BUS_WIDTH(DRAM_DCR_BUS_WIDTH_32BIT))
                return 4;
        else
                return 2;
}

/*
 * Note: This differs from pm/standby in that it checks the bus width
 */
static void mctl_enable_dllx(u32 phase)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 i, number_of_lanes;

        number_of_lanes = mctl_get_number_of_lanes();

        for (i = 1; i <= number_of_lanes; i++) {
                clrsetbits_le32(&dram->dllcr[i], 0xf << 14,
                                (phase & 0xf) << 14);
                clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET,
                                DRAM_DLLCR_DISABLE);
                phase >>= 4;
        }
        udelay(2);

        for (i = 1; i <= number_of_lanes; i++)
                clrbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET |
                             DRAM_DLLCR_DISABLE);
        udelay(22);

        for (i = 1; i <= number_of_lanes; i++)
                clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_DISABLE,
                                DRAM_DLLCR_NRESET);
        udelay(22);
}

static u32 hpcr_value[32] = {
#ifdef CONFIG_MACH_SUN5I
        0, 0, 0, 0,
        0, 0, 0, 0,
        0, 0, 0, 0,
        0, 0, 0, 0,
        0x1031, 0x1031, 0x0735, 0x1035,
        0x1035, 0x0731, 0x1031, 0,
        0x0301, 0x0301, 0x0301, 0x0301,
        0x0301, 0x0301, 0x0301, 0
#endif
#ifdef CONFIG_MACH_SUN4I
        0x0301, 0x0301, 0x0301, 0x0301,
        0x0301, 0x0301, 0, 0,
        0, 0, 0, 0,
        0, 0, 0, 0,
        0x1031, 0x1031, 0x0735, 0x5031,
        0x1035, 0x0731, 0x1031, 0x0735,
        0x1035, 0x1031, 0x0731, 0x1035,
        0x1031, 0x0301, 0x0301, 0x0731
#endif
#ifdef CONFIG_MACH_SUN7I
        0x0301, 0x0301, 0x0301, 0x0301,
        0x0301, 0x0301, 0x0301, 0x0301,
        0, 0, 0, 0,
        0, 0, 0, 0,
        0x1031, 0x1031, 0x0735, 0x1035,
        0x1035, 0x0731, 0x1031, 0x0735,
        0x1035, 0x1031, 0x0731, 0x1035,
        0x0001, 0x1031, 0, 0x1031
        /* last row differs from boot0 source table
         * 0x1031, 0x0301, 0x0301, 0x0731
         * but boot0 code skips #28 and #30, and sets #29 and #31 to the
         * value from #28 entry (0x1031)
         */
#endif
};

static void mctl_configure_hostport(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 i;

        for (i = 0; i < 32; i++)
                writel(hpcr_value[i], &dram->hpcr[i]);
}

static void mctl_setup_dram_clock(u32 clk, u32 mbus_clk)
{
        u32 reg_val;
        struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
        u32 pll5p_clk, pll6x_clk;
        u32 pll5p_div, pll6x_div;
        u32 pll5p_rate, pll6x_rate;

        /* setup DRAM PLL */
        reg_val = readl(&ccm->pll5_cfg);
        reg_val &= ~CCM_PLL5_CTRL_M_MASK;               /* set M to 0 (x1) */
        reg_val &= ~CCM_PLL5_CTRL_K_MASK;               /* set K to 0 (x1) */
        reg_val &= ~CCM_PLL5_CTRL_N_MASK;               /* set N to 0 (x0) */
        reg_val &= ~CCM_PLL5_CTRL_P_MASK;               /* set P to 0 (x1) */
#ifdef CONFIG_OLD_SUNXI_KERNEL_COMPAT
        /* Old kernels are hardcoded to P=1 (divide by 2) */
        reg_val |= CCM_PLL5_CTRL_P(1);
#endif
        if (clk >= 540 && clk < 552) {
                /* dram = 540MHz */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(15));
        } else if (clk >= 512 && clk < 528) {
                /* dram = 512MHz */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(4));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(16));
        } else if (clk >= 496 && clk < 504) {
                /* dram = 496MHz */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(31));
        } else if (clk >= 468 && clk < 480) {
                /* dram = 468MHz */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(13));
        } else if (clk >= 396 && clk < 408) {
                /* dram = 396MHz */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(11));
        } else {
                /* any other frequency that is a multiple of 24 */
                reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
                reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
                reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(clk / 24));
        }
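        /*
         * Illustrative check of the factors above, assuming the usual sunxi
         * PLL5 formula of 24 MHz * N * K / M for the DDR output: e.g. for
         * the 540 MHz case, 24 * 15 * 3 / 2 = 540, and in the generic case
         * 24 * (clk / 24) * 2 / 2 = clk.
         */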
        reg_val &= ~CCM_PLL5_CTRL_VCO_GAIN;             /* PLL VCO Gain off */
        reg_val |= CCM_PLL5_CTRL_EN;                    /* PLL On */
        writel(reg_val, &ccm->pll5_cfg);
        udelay(5500);

        setbits_le32(&ccm->pll5_cfg, CCM_PLL5_CTRL_DDR_CLK);

#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN7I)
        /* reset GPS */
        clrbits_le32(&ccm->gps_clk_cfg, CCM_GPS_CTRL_RESET | CCM_GPS_CTRL_GATE);
        setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
        udelay(1);
        clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
#endif

        /* setup MBUS clock */
        if (!mbus_clk)
                mbus_clk = 300;

        /* PLL5P and PLL6 are the potential clock sources for MBUS */
        pll6x_clk = clock_get_pll6() / 1000000;
#ifdef CONFIG_MACH_SUN7I
        pll6x_clk *= 2; /* sun7i uses PLL6*2, sun5i uses just PLL6 */
#endif
        pll5p_clk = clock_get_pll5p() / 1000000;
        pll6x_div = DIV_ROUND_UP(pll6x_clk, mbus_clk);
        pll5p_div = DIV_ROUND_UP(pll5p_clk, mbus_clk);
        pll6x_rate = pll6x_clk / pll6x_div;
        pll5p_rate = pll5p_clk / pll5p_div;
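        /*
         * Illustrative example (assumed numbers): with mbus_clk = 300 and a
         * 600 MHz PLL6-derived clock, pll6x_div = 2 and pll6x_rate = 300, so
         * PLL6 is chosen below unless the PLL5P-derived rate works out higher.
         */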

        if (pll6x_div <= 16 && pll6x_rate > pll5p_rate) {
                /* use PLL6 as the MBUS clock source */
                reg_val = CCM_MBUS_CTRL_GATE |
                          CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL6) |
                          CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
                          CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll6x_div));
        } else if (pll5p_div <= 16) {
                /* use PLL5P as the MBUS clock source */
                reg_val = CCM_MBUS_CTRL_GATE |
                          CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL5) |
                          CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
                          CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll5p_div));
        } else {
                panic("Bad mbus_clk\n");
        }
        writel(reg_val, &ccm->mbus_clk_cfg);

        /*
         * open DRAMC AHB & DLL register clock
         * close it first
         */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
        clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
#else
        clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
#endif
        udelay(22);

        /* then open it */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
        setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
#else
        setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
#endif
        udelay(22);
}

/*
 * The data from rslrX and rdgrX registers (X=rank) is stored
 * in a single 32-bit value using the following format:
 *   bits [31:26] - DQS gating system latency for byte lane 3
 *   bits [25:24] - DQS gating phase select for byte lane 3
 *   bits [23:18] - DQS gating system latency for byte lane 2
 *   bits [17:16] - DQS gating phase select for byte lane 2
 *   bits [15:10] - DQS gating system latency for byte lane 1
 *   bits [ 9:8 ] - DQS gating phase select for byte lane 1
 *   bits [ 7:2 ] - DQS gating system latency for byte lane 0
 *   bits [ 1:0 ] - DQS gating phase select for byte lane 0
 */
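/*
 * For example (illustrative only): a per-lane byte value of 0x0e in the
 * dqs_gating_delay parameter below corresponds to a DQS gating system
 * latency of 3 and a phase select of 2 for that byte lane.
 */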
static void mctl_set_dqs_gating_delay(int rank, u32 dqs_gating_delay)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 lane, number_of_lanes = mctl_get_number_of_lanes();
        /* gating system latency for the selected rank (3 bits per lane: cycles) */
        u32 slr = readl(rank == 0 ? &dram->rslr0 : &dram->rslr1);
        /* gating phase select for the selected rank (2 bits per lane: 90, 180, 270, 360) */
        u32 dgr = readl(rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
        for (lane = 0; lane < number_of_lanes; lane++) {
                u32 tmp = dqs_gating_delay >> (lane * 8);
                slr &= ~(7 << (lane * 3));
                slr |= ((tmp >> 2) & 7) << (lane * 3);
                dgr &= ~(3 << (lane * 2));
                dgr |= (tmp & 3) << (lane * 2);
        }
        writel(slr, rank == 0 ? &dram->rslr0 : &dram->rslr1);
        writel(dgr, rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
}

static int dramc_scan_readpipe(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 reg_val;

        /* data training trigger */
        clrbits_le32(&dram->csr, DRAM_CSR_FAILED);
        setbits_le32(&dram->ccr, DRAM_CCR_DATA_TRAINING);

        /* check whether data training process has completed */
        await_bits_clear(&dram->ccr, DRAM_CCR_DATA_TRAINING);

        /* check data training result */
        reg_val = readl(&dram->csr);
        if (reg_val & DRAM_CSR_FAILED)
                return -1;

        return 0;
}

static void dramc_clock_output_en(u32 on)
{
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

        if (on)
                setbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
        else
                clrbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
#endif
#ifdef CONFIG_MACH_SUN4I
        struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
        if (on)
                setbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
        else
                clrbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
#endif
}

/* tRFC in nanoseconds for different densities (from the DDR3 spec) */
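/* The table index is expected to match the DRAM_DCR_CHIP_DENSITY_* value
 * that dramc_init_helper() passes in via the 'density' argument below. */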
static const u16 tRFC_DDR3_table[6] = {
        /* 256Mb    512Mb    1Gb      2Gb      4Gb      8Gb */
           90,      90,      110,     160,     300,     350
};

static void dramc_set_autorefresh_cycle(u32 clk, u32 density)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 tRFC, tREFI;

        tRFC = (tRFC_DDR3_table[density] * clk + 999) / 1000;
        tREFI = (7987 * clk) >> 10;     /* <= 7.8us */
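        /*
         * Worked example (illustrative): at clk = 480 MHz with a 2 Gb chip
         * density (tRFC = 160 ns), this gives tRFC = 77 cycles and
         * tREFI = (7987 * 480) >> 10 = 3743 cycles, i.e. just under 7.8 us.
         */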

        writel(DRAM_DRR_TREFI(tREFI) | DRAM_DRR_TRFC(tRFC), &dram->drr);
}

/* Calculate the value for A11, A10, A9 bits in MR0 (write recovery) */
static u32 ddr3_write_recovery(u32 clk)
{
        u32 twr_ns = 15; /* DDR3 spec says that it is 15ns for all speed bins */
        u32 twr_ck = (twr_ns * clk + 999) / 1000;
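        /*
         * E.g. (illustrative) at clk = 480 MHz: twr_ck = 8, so 4 is returned,
         * which matches the DDR3 MR0 encoding for a write recovery of 8.
         */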
        if (twr_ck < 5)
                return 1;
        else if (twr_ck <= 8)
                return twr_ck - 4;
        else if (twr_ck <= 10)
                return 5;
        else
                return 6;
}

/*
 * If the dram->ppwrsctl (SDR_DPCR) register has the lowest bit set to 1, this
 * means that DRAM is currently in self-refresh mode and retaining the old
 * data. Since we have no idea what to do in this situation yet, just set this
 * register to 0 and initialize DRAM in the same way as on any normal reboot
 * (discarding whatever was stored there).
 *
 * Note: on sun7i hardware, the highest 16 bits need to be set to the 0x1651
 * magic value for this write operation to have any effect. On sun5i hardware
 * this magic value is not necessary. And on sun4i hardware the writes to this
 * register seem to have no effect at all.
 */
static void mctl_disable_power_save(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        writel(0x16510000, &dram->ppwrsctl);
}

/*
 * After the DRAM is powered up or reset, the DDR3 spec requires waiting at
 * least 500 us before driving the CKE pin (Clock Enable) high. The dram->idcr
 * (SDR_IDCR) register appears to configure this delay, which gets applied
 * right at the time when the DRAM initialization is activated in the
 * 'mctl_ddr3_initialize' function.
 */
static void mctl_set_cke_delay(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

        /* The CKE delay is represented in DRAM clock cycles, multiplied by N
         * (where N=2 for sun4i/sun5i and N=3 for sun7i). Here it is set to
         * the maximum possible value 0x1ffff, just like in the Allwinner
         * boot0 bootloader. The resulting delay value is somewhere between
         * ~0.4 ms (sun5i with 648 MHz DRAM clock speed) and ~1.1 ms (sun7i
         * with 360 MHz DRAM clock speed). */
        setbits_le32(&dram->idcr, 0x1ffff);
}

/*
 * This triggers the DRAM initialization. Among other things, it sends the
 * mode register values to the DRAM. Very likely the ZQCL command is also
 * executed (to do the initial impedance calibration on the DRAM side of the
 * wire). The memory controller and the PHY must already be configured before
 * calling this function.
 */
static void mctl_ddr3_initialize(void)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        setbits_le32(&dram->ccr, DRAM_CCR_INIT);
        await_bits_clear(&dram->ccr, DRAM_CCR_INIT);
}

/*
 * Perform impedance calibration on the DRAM controller side of the wire.
 */
static void mctl_set_impedance(u32 zq, bool odt_en)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 reg_val;
        u32 zprog = zq & 0xFF, zdata = (zq >> 8) & 0xFFFFF;

#ifndef CONFIG_MACH_SUN7I
        /* It appears that some kind of automatically initiated default
         * ZQ calibration is already in progress at this point on sun4i/sun5i
         * hardware, but not on sun7i. So it is reasonable to wait for its
         * completion before doing anything else. */
        await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
#endif

        /* ZQ calibration is not really useful unless ODT is enabled */
        if (!odt_en)
                return;

#ifdef CONFIG_MACH_SUN7I
        /* Enabling ODT in SDR_IOCR on sun7i hardware results in a deadlock
         * unless bit 24 is set in SDR_ZQCR1. Not much is known about the
         * SDR_ZQCR1 register, but there are hints indicating that it might
         * be related to periodic impedance re-calibration. This particular
         * magic value is borrowed from the Allwinner boot0 bootloader, and
         * using it helps to avoid trouble. */
        writel((1 << 24) | (1 << 1), &dram->zqcr1);
#endif

        /* Needed at least for sun5i, because it does not self-clear there */
        clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

        if (zdata) {
                /* Set the user-supplied impedance data */
                reg_val = DRAM_ZQCR0_ZDEN | zdata;
                writel(reg_val, &dram->zqcr0);
                /* no need to wait, this takes effect immediately */
        } else {
                /* Do the calibration using the external resistor */
                reg_val = DRAM_ZQCR0_ZCAL | DRAM_ZQCR0_IMP_DIV(zprog);
                writel(reg_val, &dram->zqcr0);
                /* Wait for the new impedance configuration to settle */
                await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
        }

        /* Needed at least for sun5i, because it does not self-clear there */
        clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

        /* Set the I/O configuration register */
        writel(DRAM_IOCR_ODT_EN, &dram->iocr);
}

static unsigned long dramc_init_helper(struct dram_para *para)
{
        struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
        u32 reg_val;
        u32 density;
        int ret_val;

        /*
         * only single-rank DDR3 is supported by this code, even though the
         * hardware can theoretically support DDR2 and up to two ranks
         */
        if (para->type != DRAM_MEMORY_TYPE_DDR3 || para->rank_num != 1)
                return 0;

        /* set up the DRAM related clocks */
        mctl_setup_dram_clock(para->clock, para->mbus_clock);

        /* Disable any pad power save control */
        mctl_disable_power_save();

        mctl_set_drive();

        /* dram clock off */
        dramc_clock_output_en(0);

#ifdef CONFIG_MACH_SUN4I
        /* select dram controller 1 */
        writel(DRAM_CSEL_MAGIC, &dram->csel);
#endif

        mctl_itm_disable();
        mctl_enable_dll0(para->tpr3);

        /* configure external DRAM */
        reg_val = DRAM_DCR_TYPE_DDR3;
        reg_val |= DRAM_DCR_IO_WIDTH(para->io_width >> 3);

        if (para->density == 256)
                density = DRAM_DCR_CHIP_DENSITY_256M;
        else if (para->density == 512)
                density = DRAM_DCR_CHIP_DENSITY_512M;
        else if (para->density == 1024)
                density = DRAM_DCR_CHIP_DENSITY_1024M;
        else if (para->density == 2048)
                density = DRAM_DCR_CHIP_DENSITY_2048M;
        else if (para->density == 4096)
                density = DRAM_DCR_CHIP_DENSITY_4096M;
        else if (para->density == 8192)
                density = DRAM_DCR_CHIP_DENSITY_8192M;
        else
                density = DRAM_DCR_CHIP_DENSITY_256M;

        reg_val |= DRAM_DCR_CHIP_DENSITY(density);
        reg_val |= DRAM_DCR_BUS_WIDTH((para->bus_width >> 3) - 1);
        reg_val |= DRAM_DCR_RANK_SEL(para->rank_num - 1);
        reg_val |= DRAM_DCR_CMD_RANK_ALL;
        reg_val |= DRAM_DCR_MODE(DRAM_DCR_MODE_INTERLEAVE);
        writel(reg_val, &dram->dcr);

        dramc_clock_output_en(1);

        mctl_set_impedance(para->zq, para->odt_en);

        mctl_set_cke_delay();

        mctl_ddr3_reset();

        udelay(1);

        await_bits_clear(&dram->ccr, DRAM_CCR_INIT);

        mctl_enable_dllx(para->tpr3);

        /* set refresh period */
        dramc_set_autorefresh_cycle(para->clock, density);

        /* set timing parameters */
        writel(para->tpr0, &dram->tpr0);
        writel(para->tpr1, &dram->tpr1);
        writel(para->tpr2, &dram->tpr2);

        reg_val = DRAM_MR_BURST_LENGTH(0x0);
#if (defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I))
        reg_val |= DRAM_MR_POWER_DOWN;
#endif
        reg_val |= DRAM_MR_CAS_LAT(para->cas - 4);
        reg_val |= DRAM_MR_WRITE_RECOVERY(ddr3_write_recovery(para->clock));
        writel(reg_val, &dram->mr);

        writel(para->emr1, &dram->emr);
        writel(para->emr2, &dram->emr2);
        writel(para->emr3, &dram->emr3);

        /* disable drift compensation and set passive DQS window mode */
        clrsetbits_le32(&dram->ccr, DRAM_CCR_DQS_DRIFT_COMP, DRAM_CCR_DQS_GATE);

#ifdef CONFIG_MACH_SUN7I
        /* Command rate timing mode 2T & 1T */
        if (para->tpr4 & 0x1)
                setbits_le32(&dram->ccr, DRAM_CCR_COMMAND_RATE_1T);
#endif
        /* initialize external DRAM */
        mctl_ddr3_initialize();

        /* scan read pipe value */
        mctl_itm_enable();

        /* Hardware DQS gate training */
        ret_val = dramc_scan_readpipe();

        if (ret_val < 0)
                return 0;

        /* allow overriding the DQS training results with a custom delay */
        if (para->dqs_gating_delay)
                mctl_set_dqs_gating_delay(0, para->dqs_gating_delay);

        /* set the DQS gating window type */
        if (para->active_windowing)
                clrbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);
        else
                setbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);

        mctl_itm_reset();

        /* configure all host ports */
        mctl_configure_hostport();

        return get_ram_size((long *)PHYS_SDRAM_0, PHYS_SDRAM_0_SIZE);
}

unsigned long dramc_init(struct dram_para *para)
{
        unsigned long dram_size, actual_density;

        /* without a dram configuration there is nothing we can do */
        if (!para)
                return 0;

        /* if everything is known, then autodetection is not necessary */
        if (para->io_width && para->bus_width && para->density)
                return dramc_init_helper(para);

        /* try to autodetect the DRAM bus width and density */
        para->io_width  = 16;
        para->bus_width = 32;
#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN5I)
        /* only A0-A14 address lines on A10/A13, limiting max density to 4096 */
        para->density = 4096;
#else
        /* all A0-A15 address lines on A20, which allows a density of 8192 */
        para->density = 8192;
#endif

        dram_size = dramc_init_helper(para);
        if (!dram_size) {
                /* if 32-bit bus width failed, try 16-bit bus width instead */
                para->bus_width = 16;
                dram_size = dramc_init_helper(para);
                if (!dram_size) {
                        /* if 16-bit bus width also failed, then bail out */
                        return dram_size;
                }
        }

        /* check if we need to adjust the density */
        actual_density = (dram_size >> 17) * para->io_width / para->bus_width;
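        /*
         * dram_size >> 17 converts bytes to Mbit; scaling by io_width /
         * bus_width then gives the per-chip density. E.g. (illustrative)
         * 1 GiB total with x16 chips on a 32-bit bus: 8192 Mbit * 16 / 32 =
         * 4096 Mbit per chip.
         */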

        if (actual_density != para->density) {
                /* update the density and re-initialize the DRAM */
                para->density = actual_density;
                dram_size = dramc_init_helper(para);
        }

        return dram_size;
}