uboot/arch/arm/cpu/armv7/mx6/ddr.c
   1/*
   2 * Copyright (C) 2014 Gateworks Corporation
   3 * Author: Tim Harvey <tharvey@gateworks.com>
   4 *
   5 * SPDX-License-Identifier:     GPL-2.0+
   6 */
   7
   8#include <common.h>
   9#include <linux/types.h>
  10#include <asm/arch/clock.h>
  11#include <asm/arch/mx6-ddr.h>
  12#include <asm/arch/sys_proto.h>
  13#include <asm/io.h>
  14#include <asm/types.h>
  15#include <wait_bit.h>
  16
  17#if defined(CONFIG_MX6_DDRCAL)
  18static void reset_read_data_fifos(void)
  19{
  20        struct mmdc_p_regs *mmdc0 = (struct mmdc_p_regs *)MMDC_P0_BASE_ADDR;
  21
  22        /* Reset data FIFOs twice. */
  23        setbits_le32(&mmdc0->mpdgctrl0, 1 << 31);
  24        wait_for_bit("MMDC", &mmdc0->mpdgctrl0, 1 << 31, 0, 100, 0);
  25
  26        setbits_le32(&mmdc0->mpdgctrl0, 1 << 31);
  27        wait_for_bit("MMDC", &mmdc0->mpdgctrl0, 1 << 31, 0, 100, 0);
  28}
  29
  30static void precharge_all(const bool cs0_enable, const bool cs1_enable)
  31{
  32        struct mmdc_p_regs *mmdc0 = (struct mmdc_p_regs *)MMDC_P0_BASE_ADDR;
  33
  34        /*
  35         * Issue the Precharge-All command to the DDR device for both
  36         * chip selects. Note, CON_REQ bit should also remain set. If
  37         * only using one chip select, then precharge only the desired
  38         * chip select.
  39         */
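            /*
             * The two MDSCR command words below differ only in bit 3 (CMD_CS),
             * which steers the precharge command to CS1 instead of CS0 (cf. the
             * MR() macro later in this file, which places its cs1 arg at bit 3).
             */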
  40        if (cs0_enable) { /* CS0 */
  41                writel(0x04008050, &mmdc0->mdscr);
  42                wait_for_bit("MMDC", &mmdc0->mdscr, 1 << 14, 1, 100, 0);
  43        }
  44
  45        if (cs1_enable) { /* CS1 */
  46                writel(0x04008058, &mmdc0->mdscr);
  47                wait_for_bit("MMDC", &mmdc0->mdscr, 1 << 14, 1, 100, 0);
  48        }
  49}
  50
  51static void force_delay_measurement(int bus_size)
  52{
  53        struct mmdc_p_regs *mmdc0 = (struct mmdc_p_regs *)MMDC_P0_BASE_ADDR;
  54        struct mmdc_p_regs *mmdc1 = (struct mmdc_p_regs *)MMDC_P1_BASE_ADDR;
  55
  56        writel(0x800, &mmdc0->mpmur0);
  57        if (bus_size == 0x2)
  58                writel(0x800, &mmdc1->mpmur0);
  59}
  60
  61static void modify_dg_result(u32 *reg_st0, u32 *reg_st1, u32 *reg_ctrl)
  62{
  63        u32 dg_tmp_val, dg_dl_abs_offset, dg_hc_del, val_ctrl;
  64
  65        /*
  66         * DQS gating absolute offset should be modified from reflecting
  67         * (HW_DG_LOWx + HW_DG_UPx)/2 to reflecting (HW_DG_UPx - 0x80)
  68         */
  69
  70        val_ctrl = readl(reg_ctrl);
  71        val_ctrl &= 0xf0000000;
  72
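            /*
             * For each status register: extract bits [26:16], subtract 0xc0,
             * then split the result so that the low 7 bits land in bits [6:0]
             * of the control register and the remaining bits are shifted up
             * into bits [11:8].
             */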
  73        dg_tmp_val = ((readl(reg_st0) & 0x07ff0000) >> 16) - 0xc0;
  74        dg_dl_abs_offset = dg_tmp_val & 0x7f;
  75        dg_hc_del = (dg_tmp_val & 0x780) << 1;
  76
  77        val_ctrl |= dg_dl_abs_offset + dg_hc_del;
  78
  79        dg_tmp_val = ((readl(reg_st1) & 0x07ff0000) >> 16) - 0xc0;
  80        dg_dl_abs_offset = dg_tmp_val & 0x7f;
  81        dg_hc_del = (dg_tmp_val & 0x780) << 1;
  82
  83        val_ctrl |= (dg_dl_abs_offset + dg_hc_del) << 16;
  84
  85        writel(val_ctrl, reg_ctrl);
  86}
  87
  88int mmdc_do_write_level_calibration(struct mx6_ddr_sysinfo const *sysinfo)
  89{
  90        struct mmdc_p_regs *mmdc0 = (struct mmdc_p_regs *)MMDC_P0_BASE_ADDR;
  91        struct mmdc_p_regs *mmdc1 = (struct mmdc_p_regs *)MMDC_P1_BASE_ADDR;
  92        u32 esdmisc_val, zq_val;
  93        u32 errors = 0;
  94        u32 ldectrl[4] = {0};
  95        u32 ddr_mr1 = 0x4;
  96        u32 rwalat_max;
  97
  98        /*
  99         * Stash old values in case calibration fails and
 100         * we need to restore them
 101         */
 102        ldectrl[0] = readl(&mmdc0->mpwldectrl0);
 103        ldectrl[1] = readl(&mmdc0->mpwldectrl1);
 104        if (sysinfo->dsize == 2) {
 105                ldectrl[2] = readl(&mmdc1->mpwldectrl0);
 106                ldectrl[3] = readl(&mmdc1->mpwldectrl1);
 107        }
 108
 109        /* disable DDR logic power down timer */
 110        clrbits_le32(&mmdc0->mdpdc, 0xff00);
 111
 112        /* disable Adopt power down timer */
 113        setbits_le32(&mmdc0->mapsr, 0x1);
 114
 115        debug("Starting write leveling calibration.\n");
 116
 117        /*
 118         * 2. disable auto refresh and ZQ calibration
 119         * before proceeding with Write Leveling calibration
 120         */
 121        esdmisc_val = readl(&mmdc0->mdref);
 122        writel(0x0000C000, &mmdc0->mdref);
 123        zq_val = readl(&mmdc0->mpzqhwctrl);
 124        writel(zq_val & ~0x3, &mmdc0->mpzqhwctrl);
 125
 126        /* 3. increase walat and ralat to maximum */
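            /* (RALAT lives in MDMISC[8:6], WALAT in MDMISC[17:16]) */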
 127        rwalat_max = (1 << 6) | (1 << 7) | (1 << 8) | (1 << 16) | (1 << 17);
 128        setbits_le32(&mmdc0->mdmisc, rwalat_max);
 129        if (sysinfo->dsize == 2)
 130                setbits_le32(&mmdc1->mdmisc, rwalat_max);
 131        /*
 132         * 4 & 5. Configure the external DDR device to enter write-leveling
 133         * mode through Load Mode Register command.
 134         * Register setting:
 135         * Bits[31:16] MR1 value (0x0080 write leveling enable)
 136         * Bit[9] set WL_EN to enable MMDC DQS output
 137         * Bits[6:4] set CMD bits for Load Mode Register programming
 138         * Bits[2:0] set CMD_BA to 0x1 for DDR MR1 programming
 139         */
 140        writel(0x00808231, &mmdc0->mdscr);
 141
 142        /* 6. Activate automatic calibration by setting MPWLGCR[HW_WL_EN] */
 143        writel(0x00000001, &mmdc0->mpwlgcr);
 144
 145        /*
 146         * 7. Upon completion of this process the MMDC de-asserts
 147         * the MPWLGCR[HW_WL_EN]
 148         */
 149        wait_for_bit("MMDC", &mmdc0->mpwlgcr, 1 << 0, 0, 100, 0);
 150
 151        /*
 152         * 8. check for any errors: check both PHYs for x64 configuration,
 153         * if x32, check only PHY0
 154         */
 155        if (readl(&mmdc0->mpwlgcr) & 0x00000F00)
 156                errors |= 1;
 157        if (sysinfo->dsize == 2)
 158                if (readl(&mmdc1->mpwlgcr) & 0x00000F00)
 159                        errors |= 2;
 160
 161        debug("Ending write leveling calibration. Error mask: 0x%x\n", errors);
 162
 163        /* check to see if cal failed */
 164        if ((readl(&mmdc0->mpwldectrl0) == 0x001F001F) &&
 165            (readl(&mmdc0->mpwldectrl1) == 0x001F001F) &&
 166            ((sysinfo->dsize < 2) ||
 167             ((readl(&mmdc1->mpwldectrl0) == 0x001F001F) &&
 168              (readl(&mmdc1->mpwldectrl1) == 0x001F001F)))) {
 169                debug("Cal seems to have soft-failed due to memory not supporting write leveling on all channels. Restoring original write leveling values.\n");
 170                writel(ldectrl[0], &mmdc0->mpwldectrl0);
 171                writel(ldectrl[1], &mmdc0->mpwldectrl1);
 172                if (sysinfo->dsize == 2) {
 173                        writel(ldectrl[2], &mmdc1->mpwldectrl0);
 174                        writel(ldectrl[3], &mmdc1->mpwldectrl1);
 175                }
 176                errors |= 4;
 177        }
 178
 179        /*
 180         * User should issue MRS command to exit write leveling mode
 181         * through Load Mode Register command
 182         * Register setting:
 183         * Bits[31:16] MR1 value "ddr_mr1" value from initialization
 184         * Bit[9] clear WL_EN to disable MMDC DQS output
 185         * Bits[6:4] set CMD bits for Load Mode Register programming
 186         * Bits[2:0] set CMD_BA to 0x1 for DDR MR1 programming
 187         */
 188        writel((ddr_mr1 << 16) + 0x8031, &mmdc0->mdscr);
 189
 190        /* re-enable auto refresh and zq cal */
 191        writel(esdmisc_val, &mmdc0->mdref);
 192        writel(zq_val, &mmdc0->mpzqhwctrl);
 193
 194        debug("\tMMDC_MPWLDECTRL0 after write level cal: 0x%08X\n",
 195              readl(&mmdc0->mpwldectrl0));
 196        debug("\tMMDC_MPWLDECTRL1 after write level cal: 0x%08X\n",
 197              readl(&mmdc0->mpwldectrl1));
 198        if (sysinfo->dsize == 2) {
 199                debug("\tMMDC_MPWLDECTRL0 after write level cal: 0x%08X\n",
 200                      readl(&mmdc1->mpwldectrl0));
 201                debug("\tMMDC_MPWLDECTRL1 after write level cal: 0x%08X\n",
 202                      readl(&mmdc1->mpwldectrl1));
 203        }
 204
 205        /* We must force a readback of these values, to get them to stick */
 206        readl(&mmdc0->mpwldectrl0);
 207        readl(&mmdc0->mpwldectrl1);
 208        if (sysinfo->dsize == 2) {
 209                readl(&mmdc1->mpwldectrl0);
 210                readl(&mmdc1->mpwldectrl1);
 211        }
 212
 213        /* enable DDR logic power down timer: */
 214        setbits_le32(&mmdc0->mdpdc, 0x00005500);
 215
 216        /* Enable Adopt power down timer: */
 217        clrbits_le32(&mmdc0->mapsr, 0x1);
 218
 219        /* Clear CON_REQ */
 220        writel(0, &mmdc0->mdscr);
 221
 222        return errors;
 223}
 224
 225int mmdc_do_dqs_calibration(struct mx6_ddr_sysinfo const *sysinfo)
 226{
 227        struct mmdc_p_regs *mmdc0 = (struct mmdc_p_regs *)MMDC_P0_BASE_ADDR;
 228        struct mmdc_p_regs *mmdc1 = (struct mmdc_p_regs *)MMDC_P1_BASE_ADDR;
 229        struct mx6dq_iomux_ddr_regs *mx6_ddr_iomux =
 230                (struct mx6dq_iomux_ddr_regs *)MX6DQ_IOM_DDR_BASE;
 231        bool cs0_enable;
 232        bool cs1_enable;
 233        bool cs0_enable_initial;
 234        bool cs1_enable_initial;
 235        u32 esdmisc_val;
 236        u32 temp_ref;
 237        u32 pddword = 0x00ffff00; /* best so far, place into MPPDCMPR1 */
 238        u32 errors = 0;
 239        u32 initdelay = 0x40404040;
 240
 241        /* check to see which chip selects are enabled */
 242        cs0_enable_initial = readl(&mmdc0->mdctl) & 0x80000000;
 243        cs1_enable_initial = readl(&mmdc0->mdctl) & 0x40000000;
 244
 245        /* disable DDR logic power down timer: */
 246        clrbits_le32(&mmdc0->mdpdc, 0xff00);
 247
 248        /* disable Adopt power down timer: */
 249        setbits_le32(&mmdc0->mapsr, 0x1);
 250
 251        /* set DQS pull ups */
 252        setbits_le32(&mx6_ddr_iomux->dram_sdqs0, 0x7000);
 253        setbits_le32(&mx6_ddr_iomux->dram_sdqs1, 0x7000);
 254        setbits_le32(&mx6_ddr_iomux->dram_sdqs2, 0x7000);
 255        setbits_le32(&mx6_ddr_iomux->dram_sdqs3, 0x7000);
 256        setbits_le32(&mx6_ddr_iomux->dram_sdqs4, 0x7000);
 257        setbits_le32(&mx6_ddr_iomux->dram_sdqs5, 0x7000);
 258        setbits_le32(&mx6_ddr_iomux->dram_sdqs6, 0x7000);
 259        setbits_le32(&mx6_ddr_iomux->dram_sdqs7, 0x7000);
 260
 261        /* Save old RALAT and WALAT values */
 262        esdmisc_val = readl(&mmdc0->mdmisc);
 263
 264        setbits_le32(&mmdc0->mdmisc,
 265                     (1 << 6) | (1 << 7) | (1 << 8) | (1 << 16) | (1 << 17));
 266
 267        /* Disable auto refresh before proceeding with calibration */
 268        temp_ref = readl(&mmdc0->mdref);
 269        writel(0x0000c000, &mmdc0->mdref);
 270
 271        /*
 272         * Per the ref manual, issue one refresh cycle (MDSCR[CMD] = 0x2);
 273         * this also sets the CON_REQ bit.
 274         */
 275        if (cs0_enable_initial)
 276                writel(0x00008020, &mmdc0->mdscr);
 277        if (cs1_enable_initial)
 278                writel(0x00008028, &mmdc0->mdscr);
 279
 280        /* poll to make sure the con_ack bit was asserted */
 281        wait_for_bit("MMDC", &mmdc0->mdscr, 1 << 14, 1, 100, 0);
 282
 283        /*
 284         * Check the MDMISC CALIB_PER_CS bit to see which CS the calibration
 285         * is targeted at (under normal circumstances it should be cleared,
 286         * as this is the default value, indicating calibration is directed
 287         * to CS0).
 288         * Disable the chip select that is not being targeted for calibration
 289         * to avoid any potential issues.  It will be re-enabled at the end
 290         * of calibration.
 291         */
 292        if ((readl(&mmdc0->mdmisc) & 0x00100000) == 0)
 293                clrbits_le32(&mmdc0->mdctl, 1 << 30);   /* clear SDE_1 */
 294        else
 295                clrbits_le32(&mmdc0->mdctl, 1 << 31);   /* clear SDE_0 */
 296
 297        /*
 298         * Check to see which chip selects are now enabled for
 299         * the remainder of the calibration.
 300         */
 301        cs0_enable = readl(&mmdc0->mdctl) & 0x80000000;
 302        cs1_enable = readl(&mmdc0->mdctl) & 0x40000000;
 303
 304        precharge_all(cs0_enable, cs1_enable);
 305
 306        /* Write the pre-defined value into MPPDCMPR1 */
 307        writel(pddword, &mmdc0->mppdcmpr1);
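            /*
             * MPPDCMPR1 holds the compare pattern that the controller checks
             * read data against during the automatic calibration steps below.
             */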
 308
 309        /*
 310         * Issue a write access to the external DDR device by setting
 311         * the bit SW_DUMMY_WR (bit 0) in the MPSWDAR0 and then poll
 312         * this bit until it clears to indicate completion of the write access.
 313         */
 314        setbits_le32(&mmdc0->mpswdar0, 1);
 315        wait_for_bit("MMDC", &mmdc0->mpswdar0, 1 << 0, 0, 100, 0);
 316
 317        /* Set the RD_DL_ABS# bits to their default values
 318         * (will be calibrated later in the read delay-line calibration).
 319         * Both PHYs for x64 configuration, if x32, do only PHY0.
 320         */
 321        writel(initdelay, &mmdc0->mprddlctl);
 322        if (sysinfo->dsize == 0x2)
 323                writel(initdelay, &mmdc1->mprddlctl);
 324
 325        /* Force a measurement, for the previous delay setup to take effect. */
 326        force_delay_measurement(sysinfo->dsize);
 327
 328        /*
 329         * ***************************
 330         * Read DQS Gating calibration
 331         * ***************************
 332         */
 333        debug("Starting Read DQS Gating calibration.\n");
 334
 335        /*
 336         * Reset the read data FIFOs (two resets); only need to issue reset
 337         * to PHY0 since in x64 mode, the reset will also go to PHY1.
 338         */
 339        reset_read_data_fifos();
 340
 341        /*
 342         * Start the automatic read DQS gating calibration process by
 343         * asserting MPDGCTRL0[HW_DG_EN] and MPDGCTRL0[DG_CMP_CYC]
 344         * and then poll MPDGCTRL0[HW_DG_EN] until this bit clears
 345         * to indicate completion.
 346         * Also, ensure that MPDGCTRL0[HW_DG_ERR] is clear to indicate
 347         * no errors were seen during calibration.
 348         */
 349
 350        /*
 351         * Set bit 30: chooses option to wait 32 cycles instead of
 352         * 16 before comparing read data.
 353         */
 354        setbits_le32(&mmdc0->mpdgctrl0, 1 << 30);
 355        if (sysinfo->dsize == 2)
 356                setbits_le32(&mmdc1->mpdgctrl0, 1 << 30);
 357
 358        /* Set bit 28 (HW_DG_EN) to start automatic read DQS gating calibration */
 359        setbits_le32(&mmdc0->mpdgctrl0, 5 << 28);
 360
 361        /* Poll for completion.  MPDGCTRL0[HW_DG_EN] should be 0 */
 362        wait_for_bit("MMDC", &mmdc0->mpdgctrl0, 1 << 28, 0, 100, 0);
 363
 364        /*
 365         * Check to see if any errors were encountered during calibration
 366         * (check MPDGCTRL0[HW_DG_ERR]).
 367         * Check both PHYs for x64 configuration, if x32, check only PHY0.
 368         */
 369        if (readl(&mmdc0->mpdgctrl0) & 0x00001000)
 370                errors |= 1;
 371
 372        if ((sysinfo->dsize == 0x2) && (readl(&mmdc1->mpdgctrl0) & 0x00001000))
 373                errors |= 2;
 374
 375        /* now disable mpdgctrl0[DG_CMP_CYC] */
 376        clrbits_le32(&mmdc0->mpdgctrl0, 1 << 30);
 377        if (sysinfo->dsize == 2)
 378                clrbits_le32(&mmdc1->mpdgctrl0, 1 << 30);
 379
 380        /*
 381         * DQS gating absolute offset should be modified from
 382         * reflecting (HW_DG_LOWx + HW_DG_UPx)/2 to
 383         * reflecting (HW_DG_UPx - 0x80)
 384         */
 385        modify_dg_result(&mmdc0->mpdghwst0, &mmdc0->mpdghwst1,
 386                         &mmdc0->mpdgctrl0);
 387        modify_dg_result(&mmdc0->mpdghwst2, &mmdc0->mpdghwst3,
 388                         &mmdc0->mpdgctrl1);
 389        if (sysinfo->dsize == 0x2) {
 390                modify_dg_result(&mmdc1->mpdghwst0, &mmdc1->mpdghwst1,
 391                                 &mmdc1->mpdgctrl0);
 392                modify_dg_result(&mmdc1->mpdghwst2, &mmdc1->mpdghwst3,
 393                                 &mmdc1->mpdgctrl1);
 394        }
 395        debug("Ending Read DQS Gating calibration. Error mask: 0x%x\n", errors);
 396
 397        /*
 398         * **********************
 399         * Read Delay calibration
 400         * **********************
 401         */
 402        debug("Starting Read Delay calibration.\n");
 403
 404        reset_read_data_fifos();
 405
 406        /*
 407         * 4. Issue the Precharge-All command to the DDR device for both
 408         * chip selects.  If only using one chip select, then precharge
 409         * only the desired chip select.
 410         */
 411        precharge_all(cs0_enable, cs1_enable);
 412
 413        /*
 414         * 9. Read delay-line calibration
 415         * Start the automatic read calibration process by asserting
 416         * MPRDDLHWCTL[HW_RD_DL_EN].
 417         */
 418        writel(0x00000030, &mmdc0->mprddlhwctl);
 419
 420        /*
 421         * 10. poll for completion
 422         * MMDC indicates that the read delay calibration has finished by
 423         * setting MPRDDLHWCTL[HW_RD_DL_EN] = 0.  Also, ensure that
 424         * no error bits were set.
 425         */
 426        wait_for_bit("MMDC", &mmdc0->mprddlhwctl, 1 << 4, 0, 100, 0);
 427
 428        /* check both PHYs for x64 configuration, if x32, check only PHY0 */
 429        if (readl(&mmdc0->mprddlhwctl) & 0x0000000f)
 430                errors |= 4;
 431
 432        if ((sysinfo->dsize == 0x2) &&
 433            (readl(&mmdc1->mprddlhwctl) & 0x0000000f))
 434                errors |= 8;
 435
 436        debug("Ending Read Delay calibration. Error mask: 0x%x\n", errors);
 437
 438        /*
 439         * ***********************
 440         * Write Delay Calibration
 441         * ***********************
 442         */
 443        debug("Starting Write Delay calibration.\n");
 444
 445        reset_read_data_fifos();
 446
 447        /*
 448         * 4. Issue the Precharge-All command to the DDR device for both
 449         * chip selects. If only using one chip select, then precharge
 450         * only the desired chip select.
 451         */
 452        precharge_all(cs0_enable, cs1_enable);
 453
 454        /*
 455         * 8. Set the WR_DL_ABS# bits to their default values.
 456         * Both PHYs for x64 configuration, if x32, do only PHY0.
 457         */
 458        writel(initdelay, &mmdc0->mpwrdlctl);
 459        if (sysinfo->dsize == 0x2)
 460                writel(initdelay, &mmdc1->mpwrdlctl);
 461
 462        /*
 463         * XXX This isn't in the manual. Force a measurement,
 464         * for the previous delay setup to take effect.
 465         */
 466        force_delay_measurement(sysinfo->dsize);
 467
 468        /*
 469         * 9. 10. Start the automatic write calibration process
 470         * by asserting MPWRDLHWCTL0[HW_WR_DL_EN].
 471         */
 472        writel(0x00000030, &mmdc0->mpwrdlhwctl);
 473
 474        /*
 475         * Poll for completion.
 476         * MMDC indicates that the write delay calibration has finished
 477         * by setting MPWRDLHWCTL[HW_WR_DL_EN] = 0.
 478         * Also, ensure that no error bits were set.
 479         */
 480        wait_for_bit("MMDC", &mmdc0->mpwrdlhwctl, 1 << 4, 0, 100, 0);
 481
 482        /* Check both PHYs for x64 configuration, if x32, check only PHY0 */
 483        if (readl(&mmdc0->mpwrdlhwctl) & 0x0000000f)
 484                errors |= 16;
 485
 486        if ((sysinfo->dsize == 0x2) &&
 487            (readl(&mmdc1->mpwrdlhwctl) & 0x0000000f))
 488                errors |= 32;
 489
 490        debug("Ending Write Delay calibration. Error mask: 0x%x\n", errors);
 491
 492        reset_read_data_fifos();
 493
 494        /* Enable DDR logic power down timer */
 495        setbits_le32(&mmdc0->mdpdc, 0x00005500);
 496
 497        /* Enable Adopt power down timer */
 498        clrbits_le32(&mmdc0->mapsr, 0x1);
 499
 500        /* Restore MDMISC value (RALAT, WALAT) */
 501        writel(esdmisc_val, &mmdc0->mdmisc);
 502
 503        /* Clear DQS pull ups */
 504        clrbits_le32(&mx6_ddr_iomux->dram_sdqs0, 0x7000);
 505        clrbits_le32(&mx6_ddr_iomux->dram_sdqs1, 0x7000);
 506        clrbits_le32(&mx6_ddr_iomux->dram_sdqs2, 0x7000);
 507        clrbits_le32(&mx6_ddr_iomux->dram_sdqs3, 0x7000);
 508        clrbits_le32(&mx6_ddr_iomux->dram_sdqs4, 0x7000);
 509        clrbits_le32(&mx6_ddr_iomux->dram_sdqs5, 0x7000);
 510        clrbits_le32(&mx6_ddr_iomux->dram_sdqs6, 0x7000);
 511        clrbits_le32(&mx6_ddr_iomux->dram_sdqs7, 0x7000);
 512
 513        /* Re-enable SDE (chip selects) if they were set initially */
 514        if (cs1_enable_initial)
 515                /* Set SDE_1 */
 516                setbits_le32(&mmdc0->mdctl, 1 << 30);
 517
 518        if (cs0_enable_initial)
 519                /* Set SDE_0 */
 520                setbits_le32(&mmdc0->mdctl, 1 << 31);
 521
 522        /* Re-enable auto refresh */
 523        writel(temp_ref, &mmdc0->mdref);
 524
 525        /* Clear the MDSCR (including the con_req bit) */
 526        writel(0x0, &mmdc0->mdscr);     /* CS0 */
 527
 528        /* Poll to make sure the con_ack bit is clear */
 529        wait_for_bit("MMDC", &mmdc0->mdscr, 1 << 14, 0, 100, 0);
 530
 531        /*
 532         * Print out the registers that were updated as a result
 533         * of the calibration process.
 534         */
 535        debug("MMDC registers updated from calibration\n");
 536        debug("Read DQS gating calibration:\n");
 537        debug("\tMPDGCTRL0 PHY0 = 0x%08X\n", readl(&mmdc0->mpdgctrl0));
 538        debug("\tMPDGCTRL1 PHY0 = 0x%08X\n", readl(&mmdc0->mpdgctrl1));
 539        if (sysinfo->dsize == 2) {
 540                debug("\tMPDGCTRL0 PHY1 = 0x%08X\n", readl(&mmdc1->mpdgctrl0));
 541                debug("\tMPDGCTRL1 PHY1 = 0x%08X\n", readl(&mmdc1->mpdgctrl1));
 542        }
 543        debug("Read calibration:\n");
 544        debug("\tMPRDDLCTL PHY0 = 0x%08X\n", readl(&mmdc0->mprddlctl));
 545        if (sysinfo->dsize == 2)
 546                debug("\tMPRDDLCTL PHY1 = 0x%08X\n", readl(&mmdc1->mprddlctl));
 547        debug("Write calibration:\n");
 548        debug("\tMPWRDLCTL PHY0 = 0x%08X\n", readl(&mmdc0->mpwrdlctl));
 549        if (sysinfo->dsize == 2)
 550                debug("\tMPWRDLCTL PHY1 = 0x%08X\n", readl(&mmdc1->mpwrdlctl));
 551
 552        /*
 553         * Registers below are for debugging purposes.  These print out
 554         * the upper and lower boundaries captured during
 555         * read DQS gating calibration.
 556         */
 557        debug("Status registers bounds for read DQS gating:\n");
 558        debug("\tMPDGHWST0 PHY0 = 0x%08x\n", readl(&mmdc0->mpdghwst0));
 559        debug("\tMPDGHWST1 PHY0 = 0x%08x\n", readl(&mmdc0->mpdghwst1));
 560        debug("\tMPDGHWST2 PHY0 = 0x%08x\n", readl(&mmdc0->mpdghwst2));
 561        debug("\tMPDGHWST3 PHY0 = 0x%08x\n", readl(&mmdc0->mpdghwst3));
 562        if (sysinfo->dsize == 2) {
 563                debug("\tMPDGHWST0 PHY1 = 0x%08x\n", readl(&mmdc1->mpdghwst0));
 564                debug("\tMPDGHWST1 PHY1 = 0x%08x\n", readl(&mmdc1->mpdghwst1));
 565                debug("\tMPDGHWST2 PHY1 = 0x%08x\n", readl(&mmdc1->mpdghwst2));
 566                debug("\tMPDGHWST3 PHY1 = 0x%08x\n", readl(&mmdc1->mpdghwst3));
 567        }
 568
 569        debug("Final do_dqs_calibration error mask: 0x%x\n", errors);
 570
 571        return errors;
 572}
 573#endif
 574
 575#if defined(CONFIG_MX6SX)
 576/* Configure MX6SX mmdc iomux */
 577void mx6sx_dram_iocfg(unsigned width,
 578                      const struct mx6sx_iomux_ddr_regs *ddr,
 579                      const struct mx6sx_iomux_grp_regs *grp)
 580{
 581        struct mx6sx_iomux_ddr_regs *mx6_ddr_iomux;
 582        struct mx6sx_iomux_grp_regs *mx6_grp_iomux;
 583
 584        mx6_ddr_iomux = (struct mx6sx_iomux_ddr_regs *)MX6SX_IOM_DDR_BASE;
 585        mx6_grp_iomux = (struct mx6sx_iomux_grp_regs *)MX6SX_IOM_GRP_BASE;
 586
 587        /* DDR IO TYPE */
 588        writel(grp->grp_ddr_type, &mx6_grp_iomux->grp_ddr_type);
 589        writel(grp->grp_ddrpke, &mx6_grp_iomux->grp_ddrpke);
 590
 591        /* CLOCK */
 592        writel(ddr->dram_sdclk_0, &mx6_ddr_iomux->dram_sdclk_0);
 593
 594        /* ADDRESS */
 595        writel(ddr->dram_cas, &mx6_ddr_iomux->dram_cas);
 596        writel(ddr->dram_ras, &mx6_ddr_iomux->dram_ras);
 597        writel(grp->grp_addds, &mx6_grp_iomux->grp_addds);
 598
 599        /* Control */
 600        writel(ddr->dram_reset, &mx6_ddr_iomux->dram_reset);
 601        writel(ddr->dram_sdba2, &mx6_ddr_iomux->dram_sdba2);
 602        writel(ddr->dram_sdcke0, &mx6_ddr_iomux->dram_sdcke0);
 603        writel(ddr->dram_sdcke1, &mx6_ddr_iomux->dram_sdcke1);
 604        writel(ddr->dram_odt0, &mx6_ddr_iomux->dram_odt0);
 605        writel(ddr->dram_odt1, &mx6_ddr_iomux->dram_odt1);
 606        writel(grp->grp_ctlds, &mx6_grp_iomux->grp_ctlds);
 607
 608        /* Data Strobes */
 609        writel(grp->grp_ddrmode_ctl, &mx6_grp_iomux->grp_ddrmode_ctl);
 610        writel(ddr->dram_sdqs0, &mx6_ddr_iomux->dram_sdqs0);
 611        writel(ddr->dram_sdqs1, &mx6_ddr_iomux->dram_sdqs1);
 612        if (width >= 32) {
 613                writel(ddr->dram_sdqs2, &mx6_ddr_iomux->dram_sdqs2);
 614                writel(ddr->dram_sdqs3, &mx6_ddr_iomux->dram_sdqs3);
 615        }
 616
 617        /* Data */
 618        writel(grp->grp_ddrmode, &mx6_grp_iomux->grp_ddrmode);
 619        writel(grp->grp_b0ds, &mx6_grp_iomux->grp_b0ds);
 620        writel(grp->grp_b1ds, &mx6_grp_iomux->grp_b1ds);
 621        if (width >= 32) {
 622                writel(grp->grp_b2ds, &mx6_grp_iomux->grp_b2ds);
 623                writel(grp->grp_b3ds, &mx6_grp_iomux->grp_b3ds);
 624        }
 625        writel(ddr->dram_dqm0, &mx6_ddr_iomux->dram_dqm0);
 626        writel(ddr->dram_dqm1, &mx6_ddr_iomux->dram_dqm1);
 627        if (width >= 32) {
 628                writel(ddr->dram_dqm2, &mx6_ddr_iomux->dram_dqm2);
 629                writel(ddr->dram_dqm3, &mx6_ddr_iomux->dram_dqm3);
 630        }
 631}
 632#endif
 633
 634#ifdef CONFIG_MX6UL
 635void mx6ul_dram_iocfg(unsigned width,
 636                      const struct mx6ul_iomux_ddr_regs *ddr,
 637                      const struct mx6ul_iomux_grp_regs *grp)
 638{
 639        struct mx6ul_iomux_ddr_regs *mx6_ddr_iomux;
 640        struct mx6ul_iomux_grp_regs *mx6_grp_iomux;
 641
 642        mx6_ddr_iomux = (struct mx6ul_iomux_ddr_regs *)MX6UL_IOM_DDR_BASE;
 643        mx6_grp_iomux = (struct mx6ul_iomux_grp_regs *)MX6UL_IOM_GRP_BASE;
 644
 645        /* DDR IO TYPE */
 646        writel(grp->grp_ddr_type, &mx6_grp_iomux->grp_ddr_type);
 647        writel(grp->grp_ddrpke, &mx6_grp_iomux->grp_ddrpke);
 648
 649        /* CLOCK */
 650        writel(ddr->dram_sdclk_0, &mx6_ddr_iomux->dram_sdclk_0);
 651
 652        /* ADDRESS */
 653        writel(ddr->dram_cas, &mx6_ddr_iomux->dram_cas);
 654        writel(ddr->dram_ras, &mx6_ddr_iomux->dram_ras);
 655        writel(grp->grp_addds, &mx6_grp_iomux->grp_addds);
 656
 657        /* Control */
 658        writel(ddr->dram_reset, &mx6_ddr_iomux->dram_reset);
 659        writel(ddr->dram_sdba2, &mx6_ddr_iomux->dram_sdba2);
 660        writel(ddr->dram_odt0, &mx6_ddr_iomux->dram_odt0);
 661        writel(ddr->dram_odt1, &mx6_ddr_iomux->dram_odt1);
 662        writel(grp->grp_ctlds, &mx6_grp_iomux->grp_ctlds);
 663
 664        /* Data Strobes */
 665        writel(grp->grp_ddrmode_ctl, &mx6_grp_iomux->grp_ddrmode_ctl);
 666        writel(ddr->dram_sdqs0, &mx6_ddr_iomux->dram_sdqs0);
 667        writel(ddr->dram_sdqs1, &mx6_ddr_iomux->dram_sdqs1);
 668
 669        /* Data */
 670        writel(grp->grp_ddrmode, &mx6_grp_iomux->grp_ddrmode);
 671        writel(grp->grp_b0ds, &mx6_grp_iomux->grp_b0ds);
 672        writel(grp->grp_b1ds, &mx6_grp_iomux->grp_b1ds);
 673        writel(ddr->dram_dqm0, &mx6_ddr_iomux->dram_dqm0);
 674        writel(ddr->dram_dqm1, &mx6_ddr_iomux->dram_dqm1);
 675}
 676#endif
 677
 678#if defined(CONFIG_MX6SL)
 679void mx6sl_dram_iocfg(unsigned width,
 680                      const struct mx6sl_iomux_ddr_regs *ddr,
 681                      const struct mx6sl_iomux_grp_regs *grp)
 682{
 683        struct mx6sl_iomux_ddr_regs *mx6_ddr_iomux;
 684        struct mx6sl_iomux_grp_regs *mx6_grp_iomux;
 685
 686        mx6_ddr_iomux = (struct mx6sl_iomux_ddr_regs *)MX6SL_IOM_DDR_BASE;
 687        mx6_grp_iomux = (struct mx6sl_iomux_grp_regs *)MX6SL_IOM_GRP_BASE;
 688
 689        /* DDR IO TYPE */
 690        mx6_grp_iomux->grp_ddr_type = grp->grp_ddr_type;
 691        mx6_grp_iomux->grp_ddrpke = grp->grp_ddrpke;
 692
 693        /* CLOCK */
 694        mx6_ddr_iomux->dram_sdclk_0 = ddr->dram_sdclk_0;
 695
 696        /* ADDRESS */
 697        mx6_ddr_iomux->dram_cas = ddr->dram_cas;
 698        mx6_ddr_iomux->dram_ras = ddr->dram_ras;
 699        mx6_grp_iomux->grp_addds = grp->grp_addds;
 700
 701        /* Control */
 702        mx6_ddr_iomux->dram_reset = ddr->dram_reset;
 703        mx6_ddr_iomux->dram_sdba2 = ddr->dram_sdba2;
 704        mx6_grp_iomux->grp_ctlds = grp->grp_ctlds;
 705
 706        /* Data Strobes */
 707        mx6_grp_iomux->grp_ddrmode_ctl = grp->grp_ddrmode_ctl;
 708        mx6_ddr_iomux->dram_sdqs0 = ddr->dram_sdqs0;
 709        mx6_ddr_iomux->dram_sdqs1 = ddr->dram_sdqs1;
 710        if (width >= 32) {
 711                mx6_ddr_iomux->dram_sdqs2 = ddr->dram_sdqs2;
 712                mx6_ddr_iomux->dram_sdqs3 = ddr->dram_sdqs3;
 713        }
 714
 715        /* Data */
 716        mx6_grp_iomux->grp_ddrmode = grp->grp_ddrmode;
 717        mx6_grp_iomux->grp_b0ds = grp->grp_b0ds;
 718        mx6_grp_iomux->grp_b1ds = grp->grp_b1ds;
 719        if (width >= 32) {
 720                mx6_grp_iomux->grp_b2ds = grp->grp_b2ds;
 721                mx6_grp_iomux->grp_b3ds = grp->grp_b3ds;
 722        }
 723
 724        mx6_ddr_iomux->dram_dqm0 = ddr->dram_dqm0;
 725        mx6_ddr_iomux->dram_dqm1 = ddr->dram_dqm1;
 726        if (width >= 32) {
 727                mx6_ddr_iomux->dram_dqm2 = ddr->dram_dqm2;
 728                mx6_ddr_iomux->dram_dqm3 = ddr->dram_dqm3;
 729        }
 730}
 731#endif
 732
 733#if defined(CONFIG_MX6QDL) || defined(CONFIG_MX6Q) || defined(CONFIG_MX6D)
 734/* Configure MX6DQ mmdc iomux */
 735void mx6dq_dram_iocfg(unsigned width,
 736                      const struct mx6dq_iomux_ddr_regs *ddr,
 737                      const struct mx6dq_iomux_grp_regs *grp)
 738{
 739        volatile struct mx6dq_iomux_ddr_regs *mx6_ddr_iomux;
 740        volatile struct mx6dq_iomux_grp_regs *mx6_grp_iomux;
 741
 742        mx6_ddr_iomux = (struct mx6dq_iomux_ddr_regs *)MX6DQ_IOM_DDR_BASE;
 743        mx6_grp_iomux = (struct mx6dq_iomux_grp_regs *)MX6DQ_IOM_GRP_BASE;
 744
 745        /* DDR IO Type */
 746        mx6_grp_iomux->grp_ddr_type = grp->grp_ddr_type;
 747        mx6_grp_iomux->grp_ddrpke = grp->grp_ddrpke;
 748
 749        /* Clock */
 750        mx6_ddr_iomux->dram_sdclk_0 = ddr->dram_sdclk_0;
 751        mx6_ddr_iomux->dram_sdclk_1 = ddr->dram_sdclk_1;
 752
 753        /* Address */
 754        mx6_ddr_iomux->dram_cas = ddr->dram_cas;
 755        mx6_ddr_iomux->dram_ras = ddr->dram_ras;
 756        mx6_grp_iomux->grp_addds = grp->grp_addds;
 757
 758        /* Control */
 759        mx6_ddr_iomux->dram_reset = ddr->dram_reset;
 760        mx6_ddr_iomux->dram_sdcke0 = ddr->dram_sdcke0;
 761        mx6_ddr_iomux->dram_sdcke1 = ddr->dram_sdcke1;
 762        mx6_ddr_iomux->dram_sdba2 = ddr->dram_sdba2;
 763        mx6_ddr_iomux->dram_sdodt0 = ddr->dram_sdodt0;
 764        mx6_ddr_iomux->dram_sdodt1 = ddr->dram_sdodt1;
 765        mx6_grp_iomux->grp_ctlds = grp->grp_ctlds;
 766
 767        /* Data Strobes */
 768        mx6_grp_iomux->grp_ddrmode_ctl = grp->grp_ddrmode_ctl;
 769        mx6_ddr_iomux->dram_sdqs0 = ddr->dram_sdqs0;
 770        mx6_ddr_iomux->dram_sdqs1 = ddr->dram_sdqs1;
 771        if (width >= 32) {
 772                mx6_ddr_iomux->dram_sdqs2 = ddr->dram_sdqs2;
 773                mx6_ddr_iomux->dram_sdqs3 = ddr->dram_sdqs3;
 774        }
 775        if (width >= 64) {
 776                mx6_ddr_iomux->dram_sdqs4 = ddr->dram_sdqs4;
 777                mx6_ddr_iomux->dram_sdqs5 = ddr->dram_sdqs5;
 778                mx6_ddr_iomux->dram_sdqs6 = ddr->dram_sdqs6;
 779                mx6_ddr_iomux->dram_sdqs7 = ddr->dram_sdqs7;
 780        }
 781
 782        /* Data */
 783        mx6_grp_iomux->grp_ddrmode = grp->grp_ddrmode;
 784        mx6_grp_iomux->grp_b0ds = grp->grp_b0ds;
 785        mx6_grp_iomux->grp_b1ds = grp->grp_b1ds;
 786        if (width >= 32) {
 787                mx6_grp_iomux->grp_b2ds = grp->grp_b2ds;
 788                mx6_grp_iomux->grp_b3ds = grp->grp_b3ds;
 789        }
 790        if (width >= 64) {
 791                mx6_grp_iomux->grp_b4ds = grp->grp_b4ds;
 792                mx6_grp_iomux->grp_b5ds = grp->grp_b5ds;
 793                mx6_grp_iomux->grp_b6ds = grp->grp_b6ds;
 794                mx6_grp_iomux->grp_b7ds = grp->grp_b7ds;
 795        }
 796        mx6_ddr_iomux->dram_dqm0 = ddr->dram_dqm0;
 797        mx6_ddr_iomux->dram_dqm1 = ddr->dram_dqm1;
 798        if (width >= 32) {
 799                mx6_ddr_iomux->dram_dqm2 = ddr->dram_dqm2;
 800                mx6_ddr_iomux->dram_dqm3 = ddr->dram_dqm3;
 801        }
 802        if (width >= 64) {
 803                mx6_ddr_iomux->dram_dqm4 = ddr->dram_dqm4;
 804                mx6_ddr_iomux->dram_dqm5 = ddr->dram_dqm5;
 805                mx6_ddr_iomux->dram_dqm6 = ddr->dram_dqm6;
 806                mx6_ddr_iomux->dram_dqm7 = ddr->dram_dqm7;
 807        }
 808}
 809#endif
 810
 811#if defined(CONFIG_MX6QDL) || defined(CONFIG_MX6DL) || defined(CONFIG_MX6S)
 812/* Configure MX6SDL mmdc iomux */
 813void mx6sdl_dram_iocfg(unsigned width,
 814                       const struct mx6sdl_iomux_ddr_regs *ddr,
 815                       const struct mx6sdl_iomux_grp_regs *grp)
 816{
 817        volatile struct mx6sdl_iomux_ddr_regs *mx6_ddr_iomux;
 818        volatile struct mx6sdl_iomux_grp_regs *mx6_grp_iomux;
 819
 820        mx6_ddr_iomux = (struct mx6sdl_iomux_ddr_regs *)MX6SDL_IOM_DDR_BASE;
 821        mx6_grp_iomux = (struct mx6sdl_iomux_grp_regs *)MX6SDL_IOM_GRP_BASE;
 822
 823        /* DDR IO Type */
 824        mx6_grp_iomux->grp_ddr_type = grp->grp_ddr_type;
 825        mx6_grp_iomux->grp_ddrpke = grp->grp_ddrpke;
 826
 827        /* Clock */
 828        mx6_ddr_iomux->dram_sdclk_0 = ddr->dram_sdclk_0;
 829        mx6_ddr_iomux->dram_sdclk_1 = ddr->dram_sdclk_1;
 830
 831        /* Address */
 832        mx6_ddr_iomux->dram_cas = ddr->dram_cas;
 833        mx6_ddr_iomux->dram_ras = ddr->dram_ras;
 834        mx6_grp_iomux->grp_addds = grp->grp_addds;
 835
 836        /* Control */
 837        mx6_ddr_iomux->dram_reset = ddr->dram_reset;
 838        mx6_ddr_iomux->dram_sdcke0 = ddr->dram_sdcke0;
 839        mx6_ddr_iomux->dram_sdcke1 = ddr->dram_sdcke1;
 840        mx6_ddr_iomux->dram_sdba2 = ddr->dram_sdba2;
 841        mx6_ddr_iomux->dram_sdodt0 = ddr->dram_sdodt0;
 842        mx6_ddr_iomux->dram_sdodt1 = ddr->dram_sdodt1;
 843        mx6_grp_iomux->grp_ctlds = grp->grp_ctlds;
 844
 845        /* Data Strobes */
 846        mx6_grp_iomux->grp_ddrmode_ctl = grp->grp_ddrmode_ctl;
 847        mx6_ddr_iomux->dram_sdqs0 = ddr->dram_sdqs0;
 848        mx6_ddr_iomux->dram_sdqs1 = ddr->dram_sdqs1;
 849        if (width >= 32) {
 850                mx6_ddr_iomux->dram_sdqs2 = ddr->dram_sdqs2;
 851                mx6_ddr_iomux->dram_sdqs3 = ddr->dram_sdqs3;
 852        }
 853        if (width >= 64) {
 854                mx6_ddr_iomux->dram_sdqs4 = ddr->dram_sdqs4;
 855                mx6_ddr_iomux->dram_sdqs5 = ddr->dram_sdqs5;
 856                mx6_ddr_iomux->dram_sdqs6 = ddr->dram_sdqs6;
 857                mx6_ddr_iomux->dram_sdqs7 = ddr->dram_sdqs7;
 858        }
 859
 860        /* Data */
 861        mx6_grp_iomux->grp_ddrmode = grp->grp_ddrmode;
 862        mx6_grp_iomux->grp_b0ds = grp->grp_b0ds;
 863        mx6_grp_iomux->grp_b1ds = grp->grp_b1ds;
 864        if (width >= 32) {
 865                mx6_grp_iomux->grp_b2ds = grp->grp_b2ds;
 866                mx6_grp_iomux->grp_b3ds = grp->grp_b3ds;
 867        }
 868        if (width >= 64) {
 869                mx6_grp_iomux->grp_b4ds = grp->grp_b4ds;
 870                mx6_grp_iomux->grp_b5ds = grp->grp_b5ds;
 871                mx6_grp_iomux->grp_b6ds = grp->grp_b6ds;
 872                mx6_grp_iomux->grp_b7ds = grp->grp_b7ds;
 873        }
 874        mx6_ddr_iomux->dram_dqm0 = ddr->dram_dqm0;
 875        mx6_ddr_iomux->dram_dqm1 = ddr->dram_dqm1;
 876        if (width >= 32) {
 877                mx6_ddr_iomux->dram_dqm2 = ddr->dram_dqm2;
 878                mx6_ddr_iomux->dram_dqm3 = ddr->dram_dqm3;
 879        }
 880        if (width >= 64) {
 881                mx6_ddr_iomux->dram_dqm4 = ddr->dram_dqm4;
 882                mx6_ddr_iomux->dram_dqm5 = ddr->dram_dqm5;
 883                mx6_ddr_iomux->dram_dqm6 = ddr->dram_dqm6;
 884                mx6_ddr_iomux->dram_dqm7 = ddr->dram_dqm7;
 885        }
 886}
 887#endif
 888
 889/*
 890 * Configure mx6 mmdc registers based on:
 891 *  - board-specific memory configuration
 892 *  - board-specific calibration data
 893 *  - ddr3/lpddr2 chip details
 894 *
 895 * The various calculations here are derived from the Freescale
 896 * 1. i.MX6DQSDL DDR3 Script Aid spreadsheet (DOC-94917) designed to generate
 897 *    MMDC configuration registers based on memory system and memory chip
 898 *    parameters.
 899 *
 900 * 2. i.MX6SL LPDDR2 Script Aid spreadsheet V0.04 designed to generate MMDC
 901 *    configuration registers based on memory system and memory chip
 902 *    parameters.
 903 *
 904 * The defaults here are those which were specified in the spreadsheet.
 905 * For details on each register, refer to the IMX6DQRM and/or IMX6SDLRM
 906 * and/or IMX6SLRM section titled MMDC initialization.
 907 */
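    /*
     * MR() builds an MDSCR command word: the mode-register value goes in
     * bits [31:16], bit 15 raises CON_REQ, 'cmd' fills the CMD field in
     * bits [6:4], 'cs1' picks the chip select via bit 3 and 'ba' is the
     * bank address in bits [2:0].
     */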
 908#define MR(val, ba, cmd, cs1) \
 909        ((val << 16) | (1 << 15) | (cmd << 4) | (cs1 << 3) | ba)
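    /*
     * MMDC1() writes to the second controller only on parts that have one;
     * on MX6SX/UL/SL the statement is skipped entirely.
     */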
 910#define MMDC1(entry, value) do {                                          \
 911        if (!is_mx6sx() && !is_mx6ul() && !is_mx6sl())                    \
 912                mmdc1->entry = value;                                     \
 913        } while (0)
 914
 915/*
 916 * According to JESD209-2B (LPDDR2), Table 103
 917 * WL: write latency
 918 */
 919static int lpddr2_wl(uint32_t mem_speed)
 920{
 921        switch (mem_speed) {
 922        case 1066:
 923        case 933:
 924                return 4;
 925        case 800:
 926                return 3;
 927        case 667:
 928        case 533:
 929                return 2;
 930        case 400:
 931        case 333:
 932                return 1;
 933        default:
 934                puts("invalid memory speed\n");
 935                hang();
 936        }
 937
 938        return 0;
 939}
 940
 941/*
 942 * According to JESD209-2B (LPDDR2), Table 103
 943 * RL: read latency
 944 */
 945static int lpddr2_rl(uint32_t mem_speed)
 946{
 947        switch (mem_speed) {
 948        case 1066:
 949                return 8;
 950        case 933:
 951                return 7;
 952        case 800:
 953                return 6;
 954        case 667:
 955                return 5;
 956        case 533:
 957                return 4;
 958        case 400:
 959        case 333:
 960                return 3;
 961        default:
 962                puts("invalid memory speed\n");
 963                hang();
 964        }
 965
 966        return 0;
 967}
 968
 969void mx6_lpddr2_cfg(const struct mx6_ddr_sysinfo *sysinfo,
 970                    const struct mx6_mmdc_calibration *calib,
 971                    const struct mx6_lpddr2_cfg *lpddr2_cfg)
 972{
 973        volatile struct mmdc_p_regs *mmdc0;
 974        u32 val;
 975        u8 tcke, tcksrx, tcksre, trrd;
 976        u8 twl, txp, tfaw, tcl;
 977        u16 tras, twr, tmrd, trtp, twtr, trfc, txsr;
 978        u16 trcd_lp, trppb_lp, trpab_lp, trc_lp;
 979        u16 cs0_end;
 980        u8 coladdr;
 981        int clkper; /* clock period in picoseconds */
 982        int clock;  /* clock freq in MHz */
 983        int cs;
 984
 985        /* only support 16/32 bits */
 986        if (sysinfo->dsize > 1)
 987                hang();
 988
 989        mmdc0 = (struct mmdc_p_regs *)MMDC_P0_BASE_ADDR;
 990
 991        clock = mxc_get_clock(MXC_DDR_CLK) / 1000000U;
 992        clkper = (1000 * 1000) / clock; /* pico seconds */
 993
 994        twl = lpddr2_wl(lpddr2_cfg->mem_speed) - 1;
 995
 996        /* LPDDR2-S2 and LPDDR2-S4 have the same tRFC value. */
 997        switch (lpddr2_cfg->density) {
 998        case 1:
 999        case 2:
1000        case 4:
1001                trfc = DIV_ROUND_UP(130000, clkper) - 1;
1002                txsr = DIV_ROUND_UP(140000, clkper) - 1;
1003                break;
1004        case 8:
1005                trfc = DIV_ROUND_UP(210000, clkper) - 1;
1006                txsr = DIV_ROUND_UP(220000, clkper) - 1;
1007                break;
1008        default:
1009                /*
1010                 * 64Mb, 128Mb, 256Mb, 512Mb are not supported currently.
1011                 */
1012                hang();
1013                break;
1014        }
1015        /*
1016         * txpdll, txpr, taonpd and taofpd are not relevant in LPDDR2 mode,
1017         * set them to 0. */
1018        txp = DIV_ROUND_UP(7500, clkper) - 1;
1019        tcke = 3;
1020        if (lpddr2_cfg->mem_speed == 333)
1021                tfaw = DIV_ROUND_UP(60000, clkper) - 1;
1022        else
1023                tfaw = DIV_ROUND_UP(50000, clkper) - 1;
1024        trrd = DIV_ROUND_UP(10000, clkper) - 1;
1025
1026        /* tckesr for LPDDR2 */
1027        tcksre = DIV_ROUND_UP(15000, clkper);
1028        tcksrx = tcksre;
1029        twr  = DIV_ROUND_UP(15000, clkper) - 1;
1030        /*
1031         * tMRR: 2, tMRW: 5
1032         * tMRD should be set to max(tMRR, tMRW)
1033         */
1034        tmrd = 5;
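            /*
             * trasmin, trcd_lp, trppb_lp and trpab_lp below are passed in
             * 1/100 ns units, so dividing by clkper / 10 (clkper is in ps)
             * yields the value in clock cycles.
             */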
1035        tras = DIV_ROUND_UP(lpddr2_cfg->trasmin, clkper / 10) - 1;
1036        /* LPDDR2 mode uses the tRCD_LP field in MDCFG3. */
1037        trcd_lp = DIV_ROUND_UP(lpddr2_cfg->trcd_lp, clkper / 10) - 1;
1038        trc_lp = DIV_ROUND_UP(lpddr2_cfg->trasmin + lpddr2_cfg->trppb_lp,
1039                              clkper / 10) - 1;
1040        trppb_lp = DIV_ROUND_UP(lpddr2_cfg->trppb_lp, clkper / 10) - 1;
1041        trpab_lp = DIV_ROUND_UP(lpddr2_cfg->trpab_lp, clkper / 10) - 1;
1042        /* For LPDDR2, CL in MDCFG0 refers to RL */
1043        tcl = lpddr2_rl(lpddr2_cfg->mem_speed) - 3;
1044        twtr = DIV_ROUND_UP(7500, clkper) - 1;
1045        trtp = DIV_ROUND_UP(7500, clkper) - 1;
1046
1047        cs0_end = 4 * sysinfo->cs_density - 1;
1048
1049        debug("density:%d Gb (%d Gb per chip)\n",
1050              sysinfo->cs_density, lpddr2_cfg->density);
1051        debug("clock: %dMHz (%d ps)\n", clock, clkper);
1052        debug("memspd:%d\n", lpddr2_cfg->mem_speed);
1053        debug("trcd_lp=%d\n", trcd_lp);
1054        debug("trppb_lp=%d\n", trppb_lp);
1055        debug("trpab_lp=%d\n", trpab_lp);
1056        debug("trc_lp=%d\n", trc_lp);
1057        debug("tcke=%d\n", tcke);
1058        debug("tcksrx=%d\n", tcksrx);
1059        debug("tcksre=%d\n", tcksre);
1060        debug("trfc=%d\n", trfc);
1061        debug("txsr=%d\n", txsr);
1062        debug("txp=%d\n", txp);
1063        debug("tfaw=%d\n", tfaw);
1064        debug("tcl=%d\n", tcl);
1065        debug("tras=%d\n", tras);
1066        debug("twr=%d\n", twr);
1067        debug("tmrd=%d\n", tmrd);
1068        debug("twl=%d\n", twl);
1069        debug("trtp=%d\n", trtp);
1070        debug("twtr=%d\n", twtr);
1071        debug("trrd=%d\n", trrd);
1072        debug("cs0_end=%d\n", cs0_end);
1073        debug("ncs=%d\n", sysinfo->ncs);
1074
1075        /*
1076         * board-specific configuration:
1077         *  These values are determined empirically and vary per board layout
1078         */
1079        mmdc0->mpwldectrl0 = calib->p0_mpwldectrl0;
1080        mmdc0->mpwldectrl1 = calib->p0_mpwldectrl1;
1081        mmdc0->mpdgctrl0 = calib->p0_mpdgctrl0;
1082        mmdc0->mpdgctrl1 = calib->p0_mpdgctrl1;
1083        mmdc0->mprddlctl = calib->p0_mprddlctl;
1084        mmdc0->mpwrdlctl = calib->p0_mpwrdlctl;
1085        mmdc0->mpzqlp2ctl = calib->mpzqlp2ctl;
1086
1087        /* Read data DQ Byte0-3 delay */
1088        mmdc0->mprddqby0dl = 0x33333333;
1089        mmdc0->mprddqby1dl = 0x33333333;
1090        if (sysinfo->dsize > 0) {
1091                mmdc0->mprddqby2dl = 0x33333333;
1092                mmdc0->mprddqby3dl = 0x33333333;
1093        }
1094
1095        /* Write data DQ Byte0-3 delay */
1096        mmdc0->mpwrdqby0dl = 0xf3333333;
1097        mmdc0->mpwrdqby1dl = 0xf3333333;
1098        if (sysinfo->dsize > 0) {
1099                mmdc0->mpwrdqby2dl = 0xf3333333;
1100                mmdc0->mpwrdqby3dl = 0xf3333333;
1101        }
1102
1103        /*
1104         * In LPDDR2 mode this register should be cleared,
1105         * so no termination will be activated.
1106         */
1107        mmdc0->mpodtctrl = 0;
1108
1109        /* complete calibration */
1110        val = (1 << 11); /* Force measurement on delay-lines */
1111        mmdc0->mpmur0 = val;
1112
1113        /* Step 1: configuration request */
1114        mmdc0->mdscr = (u32)(1 << 15); /* config request */
1115
1116        /* Step 2: Timing configuration */
1117        mmdc0->mdcfg0 = (trfc << 24) | (txsr << 16) | (txp << 13) |
1118                        (tfaw << 4) | tcl;
1119        mmdc0->mdcfg1 = (tras << 16) | (twr << 9) | (tmrd << 5) | twl;
1120        mmdc0->mdcfg2 = (trtp << 6) | (twtr << 3) | trrd;
1121        mmdc0->mdcfg3lp = (trc_lp << 16) | (trcd_lp << 8) |
1122                          (trppb_lp << 4) | trpab_lp;
1123        mmdc0->mdotc = 0;
1124
1125        mmdc0->mdasp = cs0_end; /* CS addressing */
1126
1127        /* Step 3: Configure DDR type */
1128        mmdc0->mdmisc = (sysinfo->cs1_mirror << 19) | (sysinfo->walat << 16) |
1129                        (sysinfo->bi_on << 12) | (sysinfo->mif3_mode << 9) |
1130                        (sysinfo->ralat << 6) | (1 << 3);
1131
1132        /* Step 4: Configure delay while leaving reset */
1133        mmdc0->mdor = (sysinfo->sde_to_rst << 8) |
1134                      (sysinfo->rst_to_cke << 0);
1135
1136        /* Step 5: Configure DDR physical parameters (density and burst len) */
1137        coladdr = lpddr2_cfg->coladdr;
1138        if (lpddr2_cfg->coladdr == 8)           /* 8-bit COL is 0x3 */
1139                coladdr += 4;
1140        else if (lpddr2_cfg->coladdr == 12)     /* 12-bit COL is 0x4 */
1141                coladdr += 1;
1142        mmdc0->mdctl =  (lpddr2_cfg->rowaddr - 11) << 24 |      /* ROW */
1143                        (coladdr - 9) << 20 |                   /* COL */
1144                        (0 << 19) |     /* Burst Length = 4 for LPDDR2 */
1145                        (sysinfo->dsize << 16); /* DDR data bus size */
1146
1147        /* Step 6: Perform ZQ calibration */
1148        val = 0xa1390003; /* one-time HW ZQ calib */
1149        mmdc0->mpzqhwctrl = val;
1150
1151        /* Step 7: Enable MMDC with desired chip select */
1152        mmdc0->mdctl |= (1 << 31) |                          /* SDE_0 for CS0 */
1153                        ((sysinfo->ncs == 2) ? 1 : 0) << 30; /* SDE_1 for CS1 */
1154
1155        /* Step 8: Write Mode Registers to Init LPDDR2 devices */
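            /*
             * Each MR() below issues a Load Mode Register command (CMD = 0x3)
             * to the chip select currently being initialized.
             */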
1156        for (cs = 0; cs < sysinfo->ncs; cs++) {
1157                /* MR63: reset */
1158                mmdc0->mdscr = MR(63, 0, 3, cs);
1159                /* MR10: calibration,
1160                 * 0xff is the calibration command after initialization.
1161                 */
1162                val = 0xA | (0xff << 8);
1163                mmdc0->mdscr = MR(val, 0, 3, cs);
1164                /* MR1 */
1165                val = 0x1 | (0x82 << 8);
1166                mmdc0->mdscr = MR(val, 0, 3, cs);
1167                /* MR2 */
1168                val = 0x2 | (0x04 << 8);
1169                mmdc0->mdscr = MR(val, 0, 3, cs);
1170                /* MR3 */
1171                val = 0x3 | (0x02 << 8);
1172                mmdc0->mdscr = MR(val, 0, 3, cs);
1173        }
1174
1175        /* Step 10: Power down control and self-refresh */
1176        mmdc0->mdpdc = (tcke & 0x7) << 16 |
1177                        5            << 12 |  /* PWDT_1: 256 cycles */
1178                        5            <<  8 |  /* PWDT_0: 256 cycles */
1179                        1            <<  6 |  /* BOTH_CS_PD */
1180                        (tcksrx & 0x7) << 3 |
1181                        (tcksre & 0x7);
1182        mmdc0->mapsr = 0x00001006; /* ADOPT power down enabled */
1183
1184        /* Step 11: Configure ZQ calibration: one-time and periodic 1ms */
1185        val = 0xa1310003;
1186        mmdc0->mpzqhwctrl = val;
1187
1188        /* Step 12: Configure and activate periodic refresh */
1189        mmdc0->mdref = (sysinfo->refsel << 14) | (sysinfo->refr << 11);
1190
1191        /* Step 13: Deassert config request - init complete */
1192        mmdc0->mdscr = 0x00000000;
1193
1194        /* wait for auto-ZQ calibration to complete */
1195        mdelay(1);
1196}
1197
1198void mx6_ddr3_cfg(const struct mx6_ddr_sysinfo *sysinfo,
1199                  const struct mx6_mmdc_calibration *calib,
1200                  const struct mx6_ddr3_cfg *ddr3_cfg)
1201{
1202        volatile struct mmdc_p_regs *mmdc0;
1203        volatile struct mmdc_p_regs *mmdc1;
1204        u32 val;
1205        u8 tcke, tcksrx, tcksre, txpdll, taofpd, taonpd, trrd;
1206        u8 todtlon, taxpd, tanpd, tcwl, txp, tfaw, tcl;
1207        u8 todt_idle_off = 0x4; /* from DDR3 Script Aid spreadsheet */
1208        u16 trcd, trc, tras, twr, tmrd, trtp, trp, twtr, trfc, txs, txpr;
1209        u16 cs0_end;
1210        u16 tdllk = 0x1ff; /* DLL locking time: 512 cycles (JEDEC DDR3) */
1211        u8 coladdr;
1212        int clkper; /* clock period in picoseconds */
1213        int clock; /* clock freq in MHz */
1214        int cs;
1215        u16 mem_speed = ddr3_cfg->mem_speed;
1216
1217        mmdc0 = (struct mmdc_p_regs *)MMDC_P0_BASE_ADDR;
1218        if (!is_mx6sx() && !is_mx6ul() && !is_mx6sl())
1219                mmdc1 = (struct mmdc_p_regs *)MMDC_P1_BASE_ADDR;
1220
1221        /* Limit mem_speed for MX6D/MX6Q */
1222        if (is_mx6dq() || is_mx6dqp()) {
1223                if (mem_speed > 1066)
1224                        mem_speed = 1066; /* 1066 MT/s */
1225
1226                tcwl = 4;
1227        }
1228        /* Limit mem_speed for MX6S/MX6DL */
1229        else {
1230                if (mem_speed > 800)
1231                        mem_speed = 800;  /* 800 MT/s */
1232
1233                tcwl = 3;
1234        }
1235
1236        clock = mem_speed / 2;
1237        /*
1238         * Data rate of 1066 MT/s requires 533 MHz DDR3 clock, but MX6D/Q supports
1239         * up to 528 MHz, so reduce the clock to fit chip specs
1240         */
1241        if (is_mx6dq() || is_mx6dqp()) {
1242                if (clock > 528)
1243                        clock = 528; /* 528 MHz */
1244        }
1245
1246        clkper = (1000 * 1000) / clock; /* pico seconds */
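            /* e.g. 800 MT/s -> 400 MHz clock -> clkper = 2500 ps */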
1247        todtlon = tcwl;
1248        taxpd = tcwl;
1249        tanpd = tcwl;
1250
1251        switch (ddr3_cfg->density) {
1252        case 1: /* 1Gb per chip */
1253                trfc = DIV_ROUND_UP(110000, clkper) - 1;
1254                txs = DIV_ROUND_UP(120000, clkper) - 1;
1255                break;
1256        case 2: /* 2Gb per chip */
1257                trfc = DIV_ROUND_UP(160000, clkper) - 1;
1258                txs = DIV_ROUND_UP(170000, clkper) - 1;
1259                break;
1260        case 4: /* 4Gb per chip */
1261                trfc = DIV_ROUND_UP(260000, clkper) - 1;
1262                txs = DIV_ROUND_UP(270000, clkper) - 1;
1263                break;
1264        case 8: /* 8Gb per chip */
1265                trfc = DIV_ROUND_UP(350000, clkper) - 1;
1266                txs = DIV_ROUND_UP(360000, clkper) - 1;
1267                break;
1268        default:
1269                /* invalid density */
1270                puts("invalid chip density\n");
1271                hang();
1272                break;
1273        }
1274        txpr = txs;
1275
1276        switch (mem_speed) {
1277        case 800:
1278                txp = DIV_ROUND_UP(max(3 * clkper, 7500), clkper) - 1;
1279                tcke = DIV_ROUND_UP(max(3 * clkper, 7500), clkper) - 1;
1280                if (ddr3_cfg->pagesz == 1) {
1281                        tfaw = DIV_ROUND_UP(40000, clkper) - 1;
1282                        trrd = DIV_ROUND_UP(max(4 * clkper, 10000), clkper) - 1;
1283                } else {
1284                        tfaw = DIV_ROUND_UP(50000, clkper) - 1;
1285                        trrd = DIV_ROUND_UP(max(4 * clkper, 10000), clkper) - 1;
1286                }
1287                break;
1288        case 1066:
1289                txp = DIV_ROUND_UP(max(3 * clkper, 7500), clkper) - 1;
1290                tcke = DIV_ROUND_UP(max(3 * clkper, 5625), clkper) - 1;
1291                if (ddr3_cfg->pagesz == 1) {
1292                        tfaw = DIV_ROUND_UP(37500, clkper) - 1;
1293                        trrd = DIV_ROUND_UP(max(4 * clkper, 7500), clkper) - 1;
1294                } else {
1295                        tfaw = DIV_ROUND_UP(50000, clkper) - 1;
1296                        trrd = DIV_ROUND_UP(max(4 * clkper, 10000), clkper) - 1;
1297                }
1298                break;
1299        default:
1300                puts("invalid memory speed\n");
1301                hang();
1302                break;
1303        }
1304        txpdll = DIV_ROUND_UP(max(10 * clkper, 24000), clkper) - 1;
1305        tcksre = DIV_ROUND_UP(max(5 * clkper, 10000), clkper);
1306        taonpd = DIV_ROUND_UP(2000, clkper) - 1;
1307        tcksrx = tcksre;
1308        taofpd = taonpd;
1309        twr  = DIV_ROUND_UP(15000, clkper) - 1;
1310        tmrd = DIV_ROUND_UP(max(12 * clkper, 15000), clkper) - 1;
1311        trc  = DIV_ROUND_UP(ddr3_cfg->trcmin, clkper / 10) - 1;
1312        tras = DIV_ROUND_UP(ddr3_cfg->trasmin, clkper / 10) - 1;
1313        tcl  = DIV_ROUND_UP(ddr3_cfg->trcd, clkper / 10) - 3;
1314        trp  = DIV_ROUND_UP(ddr3_cfg->trcd, clkper / 10) - 1;
1315        twtr = ROUND(max(4 * clkper, 7500) / clkper, 1) - 1;
1316        trcd = trp;
1317        trtp = twtr;
1318        cs0_end = 4 * sysinfo->cs_density - 1;
1319
1320        debug("density:%d Gb (%d Gb per chip)\n",
1321              sysinfo->cs_density, ddr3_cfg->density);
1322        debug("clock: %dMHz (%d ps)\n", clock, clkper);
1323        debug("memspd:%d\n", mem_speed);
1324        debug("tcke=%d\n", tcke);
1325        debug("tcksrx=%d\n", tcksrx);
1326        debug("tcksre=%d\n", tcksre);
1327        debug("taofpd=%d\n", taofpd);
1328        debug("taonpd=%d\n", taonpd);
1329        debug("todtlon=%d\n", todtlon);
1330        debug("tanpd=%d\n", tanpd);
1331        debug("taxpd=%d\n", taxpd);
1332        debug("trfc=%d\n", trfc);
1333        debug("txs=%d\n", txs);
1334        debug("txp=%d\n", txp);
1335        debug("txpdll=%d\n", txpdll);
1336        debug("tfaw=%d\n", tfaw);
1337        debug("tcl=%d\n", tcl);
1338        debug("trcd=%d\n", trcd);
1339        debug("trp=%d\n", trp);
1340        debug("trc=%d\n", trc);
1341        debug("tras=%d\n", tras);
1342        debug("twr=%d\n", twr);
1343        debug("tmrd=%d\n", tmrd);
1344        debug("tcwl=%d\n", tcwl);
1345        debug("tdllk=%d\n", tdllk);
1346        debug("trtp=%d\n", trtp);
1347        debug("twtr=%d\n", twtr);
1348        debug("trrd=%d\n", trrd);
1349        debug("txpr=%d\n", txpr);
1350        debug("cs0_end=%d\n", cs0_end);
1351        debug("ncs=%d\n", sysinfo->ncs);
1352        debug("Rtt_wr=%d\n", sysinfo->rtt_wr);
1353        debug("Rtt_nom=%d\n", sysinfo->rtt_nom);
1354        debug("SRT=%d\n", ddr3_cfg->SRT);
1355        debug("twr=%d\n", twr);
1356
1357        /*
1358         * board-specific configuration:
1359         *  These values are determined empirically and vary per board layout
1360         *  see:
1361         *   appnote, ddr3 spreadsheet
1362         */
1363        mmdc0->mpwldectrl0 = calib->p0_mpwldectrl0;
1364        mmdc0->mpwldectrl1 = calib->p0_mpwldectrl1;
1365        mmdc0->mpdgctrl0 = calib->p0_mpdgctrl0;
1366        mmdc0->mpdgctrl1 = calib->p0_mpdgctrl1;
1367        mmdc0->mprddlctl = calib->p0_mprddlctl;
1368        mmdc0->mpwrdlctl = calib->p0_mpwrdlctl;
1369        if (sysinfo->dsize > 1) {
1370                MMDC1(mpwldectrl0, calib->p1_mpwldectrl0);
1371                MMDC1(mpwldectrl1, calib->p1_mpwldectrl1);
1372                MMDC1(mpdgctrl0, calib->p1_mpdgctrl0);
1373                MMDC1(mpdgctrl1, calib->p1_mpdgctrl1);
1374                MMDC1(mprddlctl, calib->p1_mprddlctl);
1375                MMDC1(mpwrdlctl, calib->p1_mpwrdlctl);
1376        }
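        /*
         * On boards that calibrate at run time, these per-board values are
         * typically captured by running the MMDC calibration routines in
         * this file after an initial configuration and reading the results
         * back with mmdc_read_calibration() below.
         */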
1377
1378        /* Read data DQ Byte0-3 delay */
1379        mmdc0->mprddqby0dl = 0x33333333;
1380        mmdc0->mprddqby1dl = 0x33333333;
1381        if (sysinfo->dsize > 0) {
1382                mmdc0->mprddqby2dl = 0x33333333;
1383                mmdc0->mprddqby3dl = 0x33333333;
1384        }
1385
1386        if (sysinfo->dsize > 1) {
1387                MMDC1(mprddqby0dl, 0x33333333);
1388                MMDC1(mprddqby1dl, 0x33333333);
1389                MMDC1(mprddqby2dl, 0x33333333);
1390                MMDC1(mprddqby3dl, 0x33333333);
1391        }
1392
1393        /* MMDC Termination: rtt_nom:2 RZQ/2(120ohm), rtt_nom:1 RZQ/4(60ohm) */
1394        val = (sysinfo->rtt_nom == 2) ? 0x00011117 : 0x00022227;
1395        mmdc0->mpodtctrl = val;
1396        if (sysinfo->dsize > 1)
1397                MMDC1(mpodtctrl, val);
1398
1399        /* complete calibration */
1400        val = (1 << 11); /* Force measurement on delay-lines */
1401        mmdc0->mpmur0 = val;
1402        if (sysinfo->dsize > 1)
1403                MMDC1(mpmur0, val);
1404
1405        /* Step 1: configuration request */
1406        mmdc0->mdscr = (u32)(1 << 15); /* config request */
1407
1408        /* Step 2: Timing configuration */
1409        mmdc0->mdcfg0 = (trfc << 24) | (txs << 16) | (txp << 13) |
1410                        (txpdll << 9) | (tfaw << 4) | tcl;
1411        mmdc0->mdcfg1 = (trcd << 29) | (trp << 26) | (trc << 21) |
1412                        (tras << 16) | (1 << 15) /* trpa */ |
1413                        (twr << 9) | (tmrd << 5) | tcwl;
1414        mmdc0->mdcfg2 = (tdllk << 16) | (trtp << 6) | (twtr << 3) | trrd;
1415        mmdc0->mdotc = (taofpd << 27) | (taonpd << 24) | (tanpd << 20) |
1416                       (taxpd << 16) | (todtlon << 12) | (todt_idle_off << 4);
1417        mmdc0->mdasp = cs0_end; /* CS addressing */
1418
1419        /* Step 3: Configure DDR type */
1420        mmdc0->mdmisc = (sysinfo->cs1_mirror << 19) | (sysinfo->walat << 16) |
1421                        (sysinfo->bi_on << 12) | (sysinfo->mif3_mode << 9) |
1422                        (sysinfo->ralat << 6);
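        /*
         * Rough field meanings (as commonly documented for the MMDC MDMISC
         * register): cs1_mirror enables DDR3 address mirroring on CS1,
         * walat/ralat add write/read additional latency cycles to absorb
         * board flight time, bi_on enables bank interleaving and mif3_mode
         * selects the command-prediction mode.
         */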
1423
1424        /* Step 4: Configure delay while leaving reset */
1425        mmdc0->mdor = (txpr << 16) | (sysinfo->sde_to_rst << 8) |
1426                      (sysinfo->rst_to_cke << 0);
1427
1428        /* Step 5: Configure DDR physical parameters (density and burst len) */
1429        coladdr = ddr3_cfg->coladdr;
1430        if (ddr3_cfg->coladdr == 8)             /* 8-bit COL is 0x3 */
1431                coladdr += 4;
1432        else if (ddr3_cfg->coladdr == 12)       /* 12-bit COL is 0x4 */
1433                coladdr += 1;
1434        mmdc0->mdctl =  (ddr3_cfg->rowaddr - 11) << 24 |        /* ROW */
1435                        (coladdr - 9) << 20 |                   /* COL */
1436                        (1 << 19) |             /* Burst Length = 8 for DDR3 */
1437                        (sysinfo->dsize << 16);         /* DDR data bus size */
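        /*
         * The ROW/COL fields above encode "address bits - 11" and
         * "address bits - 9" respectively; e.g. a part with 14 row and
         * 10 column address bits yields ROW = 3, COL = 1.  The 8-bit and
         * 12-bit column adjustments exist because those widths map to the
         * non-contiguous field values 0x3 and 0x4.
         */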
1438
1439        /* Step 6: Perform ZQ calibration */
1440        val = 0xa1390001; /* one-time HW ZQ calib */
1441        mmdc0->mpzqhwctrl = val;
1442        if (sysinfo->dsize > 1)
1443                MMDC1(mpzqhwctrl, val);
1444
1445        /* Step 7: Enable MMDC with desired chip select */
1446        mmdc0->mdctl |= (1 << 31) |                          /* SDE_0 for CS0 */
1447                        ((sysinfo->ncs == 2) ? 1 : 0) << 30; /* SDE_1 for CS1 */
1448
1449        /* Step 8: Write Mode Registers to Init DDR3 devices */
1450        for (cs = 0; cs < sysinfo->ncs; cs++) {
1451                /* MR2 */
1452                val = (sysinfo->rtt_wr & 3) << 9 | (ddr3_cfg->SRT & 1) << 7 |
1453                      ((tcwl - 3) & 3) << 3;
1454                debug("MR2 CS%d: 0x%08x\n", cs, (u32)MR(val, 2, 3, cs));
1455                mmdc0->mdscr = MR(val, 2, 3, cs);
1456                /* MR3 */
1457                debug("MR3 CS%d: 0x%08x\n", cs, (u32)MR(0, 3, 3, cs));
1458                mmdc0->mdscr = MR(0, 3, 3, cs);
1459                /* MR1 */
1460                val = ((sysinfo->rtt_nom & 1) ? 1 : 0) << 2 |
1461                      ((sysinfo->rtt_nom & 2) ? 1 : 0) << 6;
1462                debug("MR1 CS%d: 0x%08x\n", cs, (u32)MR(val, 1, 3, cs));
1463                mmdc0->mdscr = MR(val, 1, 3, cs);
1464                /* MR0 */
1465                val = ((tcl - 1) << 4) |        /* CAS */
1466                      (1 << 8)   |              /* DLL Reset */
1467                      ((twr - 3) << 9) |        /* Write Recovery */
1468                      (sysinfo->pd_fast_exit << 12); /* Precharge PD PLL on */
1469                debug("MR0 CS%d: 0x%08x\n", cs, (u32)MR(val, 0, 3, cs));
1470                mmdc0->mdscr = MR(val, 0, 3, cs);
1471                /* ZQ calibration */
1472                val = (1 << 10);
1473                mmdc0->mdscr = MR(val, 0, 4, cs);
1474        }
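        /*
         * Illustrative MR0 operand, assuming the standard DDR3 MR0 field
         * layout and the sample 1893 ps clock period used in earlier
         * comments (tcl = 5, i.e. CL = 8; twr = DIV_ROUND_UP(15000, 1893)
         * - 1 = 7, i.e. 8-clock write recovery; pd_fast_exit = 0):
         *   val = (4 << 4) | (1 << 8) | (4 << 9) = 0x940
         * i.e. CL = 8, DLL reset and WR = 8.
         */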
1475
1476        /* Step 10: Power down control and self-refresh */
1477        mmdc0->mdpdc = (tcke & 0x7) << 16 |
1478                        5            << 12 |  /* PWDT_1: 256 cycles */
1479                        5            <<  8 |  /* PWDT_0: 256 cycles */
1480                        1            <<  6 |  /* BOTH_CS_PD */
1481                        (tcksrx & 0x7) << 3 |
1482                        (tcksre & 0x7);
1483        if (!sysinfo->pd_fast_exit)
1484                mmdc0->mdpdc |= (1 << 7); /* SLOW_PD */
1485        mmdc0->mapsr = 0x00001006; /* ADOPT power down enabled */
1486
1487        /* Step 11: Configure ZQ calibration: one-time and periodic 1ms */
1488        val = 0xa1390003;
1489        mmdc0->mpzqhwctrl = val;
1490        if (sysinfo->dsize > 1)
1491                MMDC1(mpzqhwctrl, val);
1492
1493        /* Step 12: Configure and activate periodic refresh */
1494        mmdc0->mdref = (sysinfo->refsel << 14) | (sysinfo->refr << 11);
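        /*
         * The values programmed above follow the MDREF layout: refsel
         * selects the refresh-request trigger (a periodic 64 kHz or 32 kHz
         * clock, or a cycle counter) and refr sets how many refresh
         * commands are issued per trigger (refr + 1).  For example, a
         * common board setting of refsel = 1, refr = 3 issues four
         * refreshes per trigger event.
         */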
1495
1496        /* Step 13: Deassert config request - init complete */
1497        mmdc0->mdscr = 0x00000000;
1498
1499        /* wait for auto-ZQ calibration to complete */
1500        mdelay(1);
1501}
1502
1503void mmdc_read_calibration(struct mx6_ddr_sysinfo const *sysinfo,
1504                           struct mx6_mmdc_calibration *calib)
1505{
1506        struct mmdc_p_regs *mmdc0 = (struct mmdc_p_regs *)MMDC_P0_BASE_ADDR;
1507        struct mmdc_p_regs *mmdc1 = (struct mmdc_p_regs *)MMDC_P1_BASE_ADDR;
1508
1509        calib->p0_mpwldectrl0 = readl(&mmdc0->mpwldectrl0);
1510        calib->p0_mpwldectrl1 = readl(&mmdc0->mpwldectrl1);
1511        calib->p0_mpdgctrl0 = readl(&mmdc0->mpdgctrl0);
1512        calib->p0_mpdgctrl1 = readl(&mmdc0->mpdgctrl1);
1513        calib->p0_mprddlctl = readl(&mmdc0->mprddlctl);
1514        calib->p0_mpwrdlctl = readl(&mmdc0->mpwrdlctl);
1515
1516        if (sysinfo->dsize == 2) {
1517                calib->p1_mpwldectrl0 = readl(&mmdc1->mpwldectrl0);
1518                calib->p1_mpwldectrl1 = readl(&mmdc1->mpwldectrl1);
1519                calib->p1_mpdgctrl0 = readl(&mmdc1->mpdgctrl0);
1520                calib->p1_mpdgctrl1 = readl(&mmdc1->mpdgctrl1);
1521                calib->p1_mprddlctl = readl(&mmdc1->mprddlctl);
1522                calib->p1_mpwrdlctl = readl(&mmdc1->mpwrdlctl);
1523        }
1524}
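
/*
 * Example usage (hypothetical board/SPL code, not part of this file):
 * after an initial mx6_dram_cfg() and a calibration run, the tuned values
 * can be dumped for hard-coding into a board's mx6_mmdc_calibration table:
 *
 *	struct mx6_mmdc_calibration calib;
 *
 *	mmdc_read_calibration(&sysinfo, &calib);
 *	printf("p0_mpdgctrl0 = 0x%08X\n", calib.p0_mpdgctrl0);
 *	printf("p0_mprddlctl = 0x%08X\n", calib.p0_mprddlctl);
 *
 * where "sysinfo" is the board's struct mx6_ddr_sysinfo.
 */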
1525
1526void mx6_dram_cfg(const struct mx6_ddr_sysinfo *sysinfo,
1527                  const struct mx6_mmdc_calibration *calib,
1528                  const void *ddr_cfg)
1529{
1530        if (sysinfo->ddr_type == DDR_TYPE_DDR3) {
1531                mx6_ddr3_cfg(sysinfo, calib, ddr_cfg);
1532        } else if (sysinfo->ddr_type == DDR_TYPE_LPDDR2) {
1533                mx6_lpddr2_cfg(sysinfo, calib, ddr_cfg);
1534        } else {
1535                puts("Unsupported ddr type\n");
1536                hang();
1537        }
1538}
1539
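/*
 * Typical call site (hypothetical SPL board code, for illustration only):
 *
 *	static const struct mx6_ddr_sysinfo sysinfo = { ... };
 *	static const struct mx6_mmdc_calibration calib = { ... };
 *	static const struct mx6_ddr3_cfg ddr3_cfg = { ... };
 *
 *	mx6_dram_cfg(&sysinfo, &calib, &ddr3_cfg);
 *
 * The third argument is interpreted according to the ddr_type field of the
 * sysinfo argument, so an LPDDR2 board would pass a struct mx6_lpddr2_cfg
 * instead.
 */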