uboot/arch/arm/mach-exynos/dmc_init_ddr3.c
// SPDX-License-Identifier: GPL-2.0+
/*
 * DDR3 mem setup file for board based on EXYNOS5
 *
 * Copyright (C) 2012 Samsung Electronics
 */

#include <common.h>
#include <config.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/cpu.h>
#include <asm/arch/dmc.h>
#include <asm/arch/power.h>
#include "common_setup.h"
#include "exynos5_setup.h"
#include "clock_init.h"

#define TIMEOUT_US              10000
#define NUM_BYTE_LANES          4
#define DEFAULT_DQS             8
#define DEFAULT_DQS_X4          ((DEFAULT_DQS << 24) | (DEFAULT_DQS << 16) \
                                | (DEFAULT_DQS << 8) | (DEFAULT_DQS << 0))

#ifdef CONFIG_EXYNOS5250
static void reset_phy_ctrl(void)
{
        struct exynos5_clock *clk =
                (struct exynos5_clock *)samsung_get_base_clock();

        writel(DDR3PHY_CTRL_PHY_RESET_OFF, &clk->lpddr3phy_ctrl);
        writel(DDR3PHY_CTRL_PHY_RESET, &clk->lpddr3phy_ctrl);
}

int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
{
        unsigned int val;
        struct exynos5_phy_control *phy0_ctrl, *phy1_ctrl;
        struct exynos5_dmc *dmc;
        int i;

        phy0_ctrl = (struct exynos5_phy_control *)samsung_get_base_dmc_phy();
        phy1_ctrl = (struct exynos5_phy_control *)(samsung_get_base_dmc_phy()
                                                        + DMC_OFFSET);
        dmc = (struct exynos5_dmc *)samsung_get_base_dmc_ctrl();

        if (reset)
                reset_phy_ctrl();

        /* Set Impedance Output Driver */
        val = (mem->impedance << CA_CK_DRVR_DS_OFFSET) |
                (mem->impedance << CA_CKE_DRVR_DS_OFFSET) |
                (mem->impedance << CA_CS_DRVR_DS_OFFSET) |
                (mem->impedance << CA_ADR_DRVR_DS_OFFSET);
        writel(val, &phy0_ctrl->phy_con39);
        writel(val, &phy1_ctrl->phy_con39);

        /* Set Read Latency and Burst Length for PHY0 and PHY1 */
        val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
                (mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
        writel(val, &phy0_ctrl->phy_con42);
        writel(val, &phy1_ctrl->phy_con42);

        /* ZQ Calibration */
        if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
                          &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
                return SETUP_ERR_ZQ_CALIBRATION_FAILURE;

        /* DQ Signal */
        writel(mem->phy0_pulld_dqs, &phy0_ctrl->phy_con14);
        writel(mem->phy1_pulld_dqs, &phy1_ctrl->phy_con14);

        writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
                | (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT),
                &dmc->concontrol);

        update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

        /* DQS Signal */
        writel(mem->phy0_dqs, &phy0_ctrl->phy_con4);
        writel(mem->phy1_dqs, &phy1_ctrl->phy_con4);

        writel(mem->phy0_dq, &phy0_ctrl->phy_con6);
        writel(mem->phy1_dq, &phy1_ctrl->phy_con6);

        writel(mem->phy0_tFS, &phy0_ctrl->phy_con10);
        writel(mem->phy1_tFS, &phy1_ctrl->phy_con10);

        val = (mem->ctrl_start_point << PHY_CON12_CTRL_START_POINT_SHIFT) |
                (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
                (mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
                (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
        writel(val, &phy0_ctrl->phy_con12);
        writel(val, &phy1_ctrl->phy_con12);

        /* Start DLL locking */
        writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
               &phy0_ctrl->phy_con12);
        writel(val | (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT),
               &phy1_ctrl->phy_con12);

        update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);

        writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
               &dmc->concontrol);

        /* Memory Channel Interleaving Size */
        writel(mem->iv_size, &dmc->ivcontrol);

        writel(mem->memconfig, &dmc->memconfig0);
        writel(mem->memconfig, &dmc->memconfig1);
        writel(mem->membaseconfig0, &dmc->membaseconfig0);
        writel(mem->membaseconfig1, &dmc->membaseconfig1);

        /* Precharge Configuration */
        writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
               &dmc->prechconfig);

        /* Power Down mode Configuration */
        writel(mem->dpwrdn_cyc << PWRDNCONFIG_DPWRDN_CYC_SHIFT |
                mem->dsref_cyc << PWRDNCONFIG_DSREF_CYC_SHIFT,
                &dmc->pwrdnconfig);

        /*
         * TimingRow, TimingData, TimingPower and Timingaref
         * values as per Memory AC parameters
         */
        writel(mem->timing_ref, &dmc->timingref);
        writel(mem->timing_row, &dmc->timingrow);
        writel(mem->timing_data, &dmc->timingdata);
        writel(mem->timing_power, &dmc->timingpower);

        /* Send PALL command */
        dmc_config_prech(mem, &dmc->directcmd);

        /* Send NOP, MRS and ZQINIT commands */
        dmc_config_mrs(mem, &dmc->directcmd);

        if (mem->gate_leveling_enable) {
                val = PHY_CON0_RESET_VAL;
                val |= P0_CMD_EN;
                writel(val, &phy0_ctrl->phy_con0);
                writel(val, &phy1_ctrl->phy_con0);

                val = PHY_CON2_RESET_VAL;
                val |= INIT_DESKEW_EN;
                writel(val, &phy0_ctrl->phy_con2);
                writel(val, &phy1_ctrl->phy_con2);

                val = PHY_CON0_RESET_VAL;
                val |= P0_CMD_EN;
                val |= BYTE_RDLVL_EN;
                writel(val, &phy0_ctrl->phy_con0);
                writel(val, &phy1_ctrl->phy_con0);

                val = (mem->ctrl_start_point <<
                                PHY_CON12_CTRL_START_POINT_SHIFT) |
                        (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
                        (mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
                        (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
                        (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
                writel(val, &phy0_ctrl->phy_con12);
                writel(val, &phy1_ctrl->phy_con12);

                val = PHY_CON2_RESET_VAL;
                val |= INIT_DESKEW_EN;
                val |= RDLVL_GATE_EN;
                writel(val, &phy0_ctrl->phy_con2);
                writel(val, &phy1_ctrl->phy_con2);

                val = PHY_CON0_RESET_VAL;
                val |= P0_CMD_EN;
                val |= BYTE_RDLVL_EN;
                val |= CTRL_SHGATE;
                writel(val, &phy0_ctrl->phy_con0);
                writel(val, &phy1_ctrl->phy_con0);

                val = PHY_CON1_RESET_VAL;
                val &= ~(CTRL_GATEDURADJ_MASK);
                writel(val, &phy0_ctrl->phy_con1);
                writel(val, &phy1_ctrl->phy_con1);

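                /*
                 * Trigger gate leveling in the DMC and wait for both
                 * channels to report completion.
                 */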
                writel(CTRL_RDLVL_GATE_ENABLE, &dmc->rdlvl_config);
                i = TIMEOUT_US;
                while ((readl(&dmc->phystatus) &
                        (RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1)) !=
                        (RDLVL_COMPLETE_CHO | RDLVL_COMPLETE_CH1) && i > 0) {
                        /*
                         * TODO(waihong): Comment on how long this takes to
                         * time out
                         */
                        sdelay(100);
                        i--;
                }
                if (!i)
                        return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
                writel(CTRL_RDLVL_GATE_DISABLE, &dmc->rdlvl_config);

                writel(0, &phy0_ctrl->phy_con14);
                writel(0, &phy1_ctrl->phy_con14);

                val = (mem->ctrl_start_point <<
                                PHY_CON12_CTRL_START_POINT_SHIFT) |
                        (mem->ctrl_inc << PHY_CON12_CTRL_INC_SHIFT) |
                        (mem->ctrl_force << PHY_CON12_CTRL_FORCE_SHIFT) |
                        (mem->ctrl_start << PHY_CON12_CTRL_START_SHIFT) |
                        (mem->ctrl_dll_on << PHY_CON12_CTRL_DLL_ON_SHIFT) |
                        (mem->ctrl_ref << PHY_CON12_CTRL_REF_SHIFT);
                writel(val, &phy0_ctrl->phy_con12);
                writel(val, &phy1_ctrl->phy_con12);

                update_reset_dll(&dmc->phycontrol0, DDR_MODE_DDR3);
        }

        /* Send PALL command */
        dmc_config_prech(mem, &dmc->directcmd);

        writel(mem->memcontrol, &dmc->memcontrol);

        /* Set DMC Concontrol and enable auto-refresh counter */
        writel(mem->concontrol | (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT)
                | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT), &dmc->concontrol);
        return 0;
}
#endif

#ifdef CONFIG_EXYNOS5420
/**
 * RAM address to use in the test.
 *
 * We'll use 4 words at this address and 4 at this address + 0x80 (Ares
 * interleaves channels every 128 bytes).  This will allow us to evaluate all of
 * the chips in a 1 chip per channel (2GB) system and half the chips in a 2
 * chip per channel (4GB) system.  We can't test the 2nd chip since we need to
 * do tests before the 2nd chip is enabled.  Looking at the 2nd chip isn't
 * critical because the 1st and 2nd chip have very similar timings (they'd
 * better have similar timings, since there's only a single adjustment that is
 * shared by both chips).
 */
const unsigned int test_addr = CONFIG_SYS_SDRAM_BASE;

/* Test pattern with which RAM will be tested */
static const unsigned int test_pattern[] = {
        0x5a5a5a5a,
        0xa5a5a5a5,
        0xf0f0f0f0,
        0x0f0f0f0f,
};

/**
 * This function is a test vector for sw read leveling;
 * it compares the read data with the written data.
 *
 * @param ch                    DMC channel number
 * @param byte_lane             which DQS byte offset,
 *                              possible values are 0,1,2,3
 * @return                      TRUE if memory was good, FALSE if not.
 */
static bool dmc_valid_window_test_vector(int ch, int byte_lane)
{
        unsigned int read_data;
        unsigned int mask;
        int i;

        mask = 0xFF << (8 * byte_lane);

        for (i = 0; i < ARRAY_SIZE(test_pattern); i++) {
                read_data = readl(test_addr + i * 4 + ch * 0x80);
                if ((read_data & mask) != (test_pattern[i] & mask))
                        return false;
        }

        return true;
}

/**
 * This function returns the current read offset value.
 *
 * @param phy_ctrl      pointer to the current phy controller
 */
static unsigned int dmc_get_read_offset_value(struct exynos5420_phy_control
                                               *phy_ctrl)
{
        return readl(&phy_ctrl->phy_con4);
}

/**
 * This function performs a resync, so that the slave DLL is updated.
 *
 * @param phy_ctrl      pointer to the current phy controller
 */
static void ddr_phy_set_do_resync(struct exynos5420_phy_control *phy_ctrl)
{
        setbits_le32(&phy_ctrl->phy_con10, PHY_CON10_CTRL_OFFSETR3);
        clrbits_le32(&phy_ctrl->phy_con10, PHY_CON10_CTRL_OFFSETR3);
}

/**
 * This function sets the read offset value register to 'offset'.
 *
 * ...we also call ddr_phy_set_do_resync().
 *
 * @param phy_ctrl      pointer to the current phy controller
 * @param offset        offset to read DQS
 */
static void dmc_set_read_offset_value(struct exynos5420_phy_control *phy_ctrl,
                                      unsigned int offset)
{
        writel(offset, &phy_ctrl->phy_con4);
        ddr_phy_set_do_resync(phy_ctrl);
}

/**
 * Convert a 2s complement byte to a byte with a sign bit.
 *
 * NOTE: you shouldn't use normal math on the number returned by this function.
 *   As an example, -10 = 0xf6.  After this function -10 = 0x8a.  If you wanted
 *   to do math and get the average of 10 and -10 (should be 0):
 *     0x8a + 0xa = 0x94 (-108)
 *     0x94 / 2   = 0xca (-54)
 *   ...and 0xca = sign bit plus 0x4a, or -74
 *
 * Also note that you lose the ability to represent -128 since there are two
 * representations of 0.
 *
 * @param b     The byte to convert in two's complement.
 * @return      The 7-bit value + sign bit.
 */
unsigned char make_signed_byte(signed char b)
{
        if (b < 0)
                return 0x80 | -b;
        else
                return b;
}

/**
 * Test various shifts starting at 'start' and going to 'end'.
 *
 * For each byte lane, we'll walk through shift starting at 'start' and going
 * to 'end' (inclusive).  When we are finally able to read the test pattern
 * we'll store the value in the results array.
 *
 * @param phy_ctrl              pointer to the current phy controller
 * @param ch                    channel number
 * @param start                 the start shift.  -127 to 127
 * @param end                   the end shift.  -127 to 127
 * @param results               we'll store results for each byte lane.
 */
void test_shifts(struct exynos5420_phy_control *phy_ctrl, int ch,
                 int start, int end, int results[NUM_BYTE_LANES])
{
        int incr = (start < end) ? 1 : -1;
        int byte_lane;

        for (byte_lane = 0; byte_lane < NUM_BYTE_LANES; byte_lane++) {
                int shift;

                dmc_set_read_offset_value(phy_ctrl, DEFAULT_DQS_X4);
                results[byte_lane] = DEFAULT_DQS;

                for (shift = start; shift != (end + incr); shift += incr) {
                        unsigned int byte_offsetr;
                        unsigned int offsetr;

                        byte_offsetr = make_signed_byte(shift);

                        offsetr = dmc_get_read_offset_value(phy_ctrl);
                        offsetr &= ~(0xFF << (8 * byte_lane));
                        offsetr |= (byte_offsetr << (8 * byte_lane));
                        dmc_set_read_offset_value(phy_ctrl, offsetr);

                        if (dmc_valid_window_test_vector(ch, byte_lane)) {
                                results[byte_lane] = shift;
                                break;
                        }
                }
        }
}

/**
 * This function performs SW read leveling to compensate for DQ-DQS skew at
 * the receiver.  It first finds the optimal read offset value on each DQS
 * and then applies the value to the PHY.
 *
 * The read offset value has a minimum and a maximum margin.  If the read
 * offset value exceeds either margin, read data will be corrupted.  To avoid
 * this we do SW read leveling:
 * 1> find the offset value's left_limit and right_limit
 * 2> calculate their center value
 * 3> program that center value into the PHY
 * 4> the PHY then ends up with its optimal offset value.
 *
 * @param phy_ctrl              pointer to the current phy controller
 * @param ch                    channel number
 * @param coarse_lock_val       The coarse lock value read from PHY_CON13.
 *                              (0 - 0x7f)
 */
static void software_find_read_offset(struct exynos5420_phy_control *phy_ctrl,
                                      int ch, unsigned int coarse_lock_val)
{
        unsigned int offsetr_cent;
        int byte_lane;
        int left_limit;
        int right_limit;
        int left[NUM_BYTE_LANES];
        int right[NUM_BYTE_LANES];
        int i;

        /* Fill the memory with test patterns */
        for (i = 0; i < ARRAY_SIZE(test_pattern); i++)
                writel(test_pattern[i], test_addr + i * 4 + ch * 0x80);

        /* Figure out the limits we'll test with; keep -127 < limit < 127 */
        left_limit = DEFAULT_DQS - coarse_lock_val;
        right_limit = DEFAULT_DQS + coarse_lock_val;
        if (right_limit > 127)
                right_limit = 127;

        /* Fill in the location where reads were OK from left and right */
        test_shifts(phy_ctrl, ch, left_limit, right_limit, left);
        test_shifts(phy_ctrl, ch, right_limit, left_limit, right);

        /* Make a final value by taking the center between the left and right */
        offsetr_cent = 0;
        for (byte_lane = 0; byte_lane < NUM_BYTE_LANES; byte_lane++) {
                int temp_center;
                unsigned int vmwc;

                temp_center = (left[byte_lane] + right[byte_lane]) / 2;
                vmwc = make_signed_byte(temp_center);
                offsetr_cent |= vmwc << (8 * byte_lane);
        }
        dmc_set_read_offset_value(phy_ctrl, offsetr_cent);
}

int ddr3_mem_ctrl_init(struct mem_timings *mem, int reset)
{
        struct exynos5420_clock *clk =
                (struct exynos5420_clock *)samsung_get_base_clock();
        struct exynos5420_power *power =
                (struct exynos5420_power *)samsung_get_base_power();
        struct exynos5420_phy_control *phy0_ctrl, *phy1_ctrl;
        struct exynos5420_dmc *drex0, *drex1;
        struct exynos5420_tzasc *tzasc0, *tzasc1;
        struct exynos5_power *pmu;
        uint32_t val, n_lock_r, n_lock_w_phy0, n_lock_w_phy1;
        uint32_t lock0_info, lock1_info;
        int chip;
        int i;

        phy0_ctrl = (struct exynos5420_phy_control *)samsung_get_base_dmc_phy();
        phy1_ctrl = (struct exynos5420_phy_control *)(samsung_get_base_dmc_phy()
                                                        + DMC_OFFSET);
        drex0 = (struct exynos5420_dmc *)samsung_get_base_dmc_ctrl();
        drex1 = (struct exynos5420_dmc *)(samsung_get_base_dmc_ctrl()
                                                        + DMC_OFFSET);
        tzasc0 = (struct exynos5420_tzasc *)samsung_get_base_dmc_tzasc();
        tzasc1 = (struct exynos5420_tzasc *)(samsung_get_base_dmc_tzasc()
                                                        + DMC_OFFSET);
        pmu = (struct exynos5_power *)EXYNOS5420_POWER_BASE;

        if (CONFIG_NR_DRAM_BANKS > 4) {
                /* Need both controllers. */
                mem->memcontrol |= DMC_MEMCONTROL_NUM_CHIP_2;
                mem->chips_per_channel = 2;
                mem->chips_to_configure = 2;
        } else {
                /* 2GB requires a single controller */
                mem->memcontrol |= DMC_MEMCONTROL_NUM_CHIP_1;
        }

        /* Enable PAUSE for DREX */
        setbits_le32(&clk->pause, ENABLE_BIT);

        /* Enable BYPASS mode */
        setbits_le32(&clk->bpll_con1, BYPASS_EN);

        writel(MUX_BPLL_SEL_FOUTBPLL, &clk->src_cdrex);
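        /* Wait until the CDREX clock mux actually switches to FOUTBPLL */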
        do {
                val = readl(&clk->mux_stat_cdrex);
                val &= BPLL_SEL_MASK;
        } while (val != FOUTBPLL);

        clrbits_le32(&clk->bpll_con1, BYPASS_EN);

        /* Specify the DDR memory type as DDR3 */
        val = readl(&phy0_ctrl->phy_con0);
        val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
        val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
        writel(val, &phy0_ctrl->phy_con0);

        val = readl(&phy1_ctrl->phy_con0);
        val &= ~(PHY_CON0_CTRL_DDR_MODE_MASK << PHY_CON0_CTRL_DDR_MODE_SHIFT);
        val |= (DDR_MODE_DDR3 << PHY_CON0_CTRL_DDR_MODE_SHIFT);
        writel(val, &phy1_ctrl->phy_con0);

        /* Set Read Latency and Burst Length for PHY0 and PHY1 */
        val = (mem->ctrl_bstlen << PHY_CON42_CTRL_BSTLEN_SHIFT) |
                (mem->ctrl_rdlat << PHY_CON42_CTRL_RDLAT_SHIFT);
        writel(val, &phy0_ctrl->phy_con42);
        writel(val, &phy1_ctrl->phy_con42);

        val = readl(&phy0_ctrl->phy_con26);
        val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
        val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
        writel(val, &phy0_ctrl->phy_con26);

        val = readl(&phy1_ctrl->phy_con26);
        val &= ~(T_WRDATA_EN_MASK << T_WRDATA_EN_OFFSET);
        val |= (T_WRDATA_EN_DDR3 << T_WRDATA_EN_OFFSET);
        writel(val, &phy1_ctrl->phy_con26);

        /*
         * Set Driver strength for CK, CKE, CS & CA to 0x7
         * Set Driver strength for Data Slice 0~3 to 0x7
         */
        val = (0x7 << CA_CK_DRVR_DS_OFFSET) | (0x7 << CA_CKE_DRVR_DS_OFFSET) |
                (0x7 << CA_CS_DRVR_DS_OFFSET) | (0x7 << CA_ADR_DRVR_DS_OFFSET);
        val |= (0x7 << DA_3_DS_OFFSET) | (0x7 << DA_2_DS_OFFSET) |
                (0x7 << DA_1_DS_OFFSET) | (0x7 << DA_0_DS_OFFSET);
        writel(val, &phy0_ctrl->phy_con39);
        writel(val, &phy1_ctrl->phy_con39);

        /* ZQ Calibration */
        if (dmc_config_zq(mem, &phy0_ctrl->phy_con16, &phy1_ctrl->phy_con16,
                          &phy0_ctrl->phy_con17, &phy1_ctrl->phy_con17))
                return SETUP_ERR_ZQ_CALIBRATION_FAILURE;

        clrbits_le32(&phy0_ctrl->phy_con16, ZQ_CLK_DIV_EN);
        clrbits_le32(&phy1_ctrl->phy_con16, ZQ_CLK_DIV_EN);

        /* DQ Signal */
        val = readl(&phy0_ctrl->phy_con14);
        val |= mem->phy0_pulld_dqs;
        writel(val, &phy0_ctrl->phy_con14);
        val = readl(&phy1_ctrl->phy_con14);
        val |= mem->phy1_pulld_dqs;
        writel(val, &phy1_ctrl->phy_con14);

        val = MEM_TERM_EN | PHY_TERM_EN;
        writel(val, &drex0->phycontrol0);
        writel(val, &drex1->phycontrol0);

        writel(mem->concontrol |
                (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
                (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
                &drex0->concontrol);
        writel(mem->concontrol |
                (mem->dfi_init_start << CONCONTROL_DFI_INIT_START_SHIFT) |
                (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT),
                &drex1->concontrol);

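        /* Wait for DFI initialization to complete on both channels */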
        do {
                val = readl(&drex0->phystatus);
        } while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);
        do {
                val = readl(&drex1->phystatus);
        } while ((val & DFI_INIT_COMPLETE) != DFI_INIT_COMPLETE);

        clrbits_le32(&drex0->concontrol, DFI_INIT_START);
        clrbits_le32(&drex1->concontrol, DFI_INIT_START);

        update_reset_dll(&drex0->phycontrol0, DDR_MODE_DDR3);
        update_reset_dll(&drex1->phycontrol0, DDR_MODE_DDR3);

        /*
         * Set Base Address:
         * 0x2000_0000 ~ 0x5FFF_FFFF
         * 0x6000_0000 ~ 0x9FFF_FFFF
         */
        /* MEMBASECONFIG0 */
        val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_0) |
                DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
        writel(val, &tzasc0->membaseconfig0);
        writel(val, &tzasc1->membaseconfig0);

        /* MEMBASECONFIG1 */
        val = DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_1) |
                DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK);
        writel(val, &tzasc0->membaseconfig1);
        writel(val, &tzasc1->membaseconfig1);

        /*
         * Memory Channel Interleaving Size
         * Ares Channel interleaving = 128 bytes
         */
        /* MEMCONFIG0/1 */
        writel(mem->memconfig, &tzasc0->memconfig0);
        writel(mem->memconfig, &tzasc1->memconfig0);
        writel(mem->memconfig, &tzasc0->memconfig1);
        writel(mem->memconfig, &tzasc1->memconfig1);

        /* Precharge Configuration */
        writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
               &drex0->prechconfig0);
        writel(mem->prechconfig_tp_cnt << PRECHCONFIG_TP_CNT_SHIFT,
               &drex1->prechconfig0);

        /*
         * TimingRow, TimingData, TimingPower and Timingaref
         * values as per Memory AC parameters
         */
        writel(mem->timing_ref, &drex0->timingref);
        writel(mem->timing_ref, &drex1->timingref);
        writel(mem->timing_row, &drex0->timingrow0);
        writel(mem->timing_row, &drex1->timingrow0);
        writel(mem->timing_data, &drex0->timingdata0);
        writel(mem->timing_data, &drex1->timingdata0);
        writel(mem->timing_power, &drex0->timingpower0);
        writel(mem->timing_power, &drex1->timingpower0);

        if (reset) {
                /*
                 * Send NOP, MRS and ZQINIT commands.
                 * Sending the MRS command will reset the DRAM.  We should not
                 * be resetting the DRAM after resume; this will lead to memory
                 * corruption as the DRAM content is lost after a DRAM reset.
                 */
                dmc_config_mrs(mem, &drex0->directcmd);
                dmc_config_mrs(mem, &drex1->directcmd);
        }

        /*
         * Get PHY_CON13 from both phys.  Gate CLKM around reading since
         * PHY_CON13 is glitchy when CLKM is running.  We're paranoid and
         * wait until we get a "fine lock", though a coarse lock is probably
         * OK (we only use the coarse numbers below).  We try to gate the
         * clock for as short a time as possible in case SDRAM is somehow
         * sensitive.  sdelay(10) in the loop is arbitrary to make sure
         * there is some time for PHY_CON13 to get updated.  In practice
         * no delay appears to be needed.
         */
        val = readl(&clk->gate_bus_cdrex);
        while (true) {
                writel(val & ~0x1, &clk->gate_bus_cdrex);
                lock0_info = readl(&phy0_ctrl->phy_con13);
                writel(val, &clk->gate_bus_cdrex);

                if ((lock0_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
                        break;

                sdelay(10);
        }
        while (true) {
                writel(val & ~0x2, &clk->gate_bus_cdrex);
                lock1_info = readl(&phy1_ctrl->phy_con13);
                writel(val, &clk->gate_bus_cdrex);

                if ((lock1_info & CTRL_FINE_LOCKED) == CTRL_FINE_LOCKED)
                        break;

                sdelay(10);
        }

        if (!reset) {
                /*
                 * During Suspend-Resume & S/W-Reset, as soon as PMU releases
                 * pad retention, CKE goes high.  This causes memory contents
                 * not to be retained during DRAM initialization.  Therefore,
                 * there is a new control register (0x100431e8[28]) which lets
                 * us release pad retention and retain the memory content
                 * until the initialization is complete.
                 */
                writel(PAD_RETENTION_DRAM_COREBLK_VAL,
                       &power->pad_retention_dram_coreblk_option);
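                /* Wait until the PMU reports the DRAM pad retention change */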
                do {
                        val = readl(&power->pad_retention_dram_status);
                } while (val != 0x1);

                /*
                 * CKE PAD retention disables DRAM self-refresh mode.
                 * Send auto refresh command for DRAM refresh.
                 */
                for (i = 0; i < 128; i++) {
                        for (chip = 0; chip < mem->chips_to_configure; chip++) {
                                writel(DIRECT_CMD_REFA |
                                       (chip << DIRECT_CMD_CHIP_SHIFT),
                                       &drex0->directcmd);
                                writel(DIRECT_CMD_REFA |
                                       (chip << DIRECT_CMD_CHIP_SHIFT),
                                       &drex1->directcmd);
                        }
                }
        }

        if (mem->gate_leveling_enable) {
                writel(PHY_CON0_RESET_VAL, &phy0_ctrl->phy_con0);
                writel(PHY_CON0_RESET_VAL, &phy1_ctrl->phy_con0);

                setbits_le32(&phy0_ctrl->phy_con0, P0_CMD_EN);
                setbits_le32(&phy1_ctrl->phy_con0, P0_CMD_EN);

                val = PHY_CON2_RESET_VAL;
                val |= INIT_DESKEW_EN;
                writel(val, &phy0_ctrl->phy_con2);
                writel(val, &phy1_ctrl->phy_con2);

                val = readl(&phy0_ctrl->phy_con1);
                val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
                writel(val, &phy0_ctrl->phy_con1);

                val = readl(&phy1_ctrl->phy_con1);
                val |= (RDLVL_PASS_ADJ_VAL << RDLVL_PASS_ADJ_OFFSET);
                writel(val, &phy1_ctrl->phy_con1);

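                /*
                 * Feed the coarse DLL lock value read from PHY_CON13 back
                 * into PHY_CON12 with the DLL switched off, so the PHYs run
                 * from the forced lock value during leveling.
                 */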
                n_lock_w_phy0 = (lock0_info & CTRL_LOCK_COARSE_MASK) >> 2;
                n_lock_r = readl(&phy0_ctrl->phy_con12);
                n_lock_r &= ~CTRL_DLL_ON;
                n_lock_r |= n_lock_w_phy0;
                writel(n_lock_r, &phy0_ctrl->phy_con12);

                n_lock_w_phy1 = (lock1_info & CTRL_LOCK_COARSE_MASK) >> 2;
                n_lock_r = readl(&phy1_ctrl->phy_con12);
                n_lock_r &= ~CTRL_DLL_ON;
                n_lock_r |= n_lock_w_phy1;
                writel(n_lock_r, &phy1_ctrl->phy_con12);

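                /*
                 * MRS to MR3 (bank 0x3) with A2 set: this should put the
                 * DRAMs into MPR mode so that gate leveling can sample the
                 * predefined read pattern.
                 */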
                val = (0x3 << DIRECT_CMD_BANK_SHIFT) | 0x4;
                for (chip = 0; chip < mem->chips_to_configure; chip++) {
                        writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
                               &drex0->directcmd);
                        writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
                               &drex1->directcmd);
                }

                setbits_le32(&phy0_ctrl->phy_con2, RDLVL_GATE_EN);
                setbits_le32(&phy1_ctrl->phy_con2, RDLVL_GATE_EN);

                setbits_le32(&phy0_ctrl->phy_con0, CTRL_SHGATE);
                setbits_le32(&phy1_ctrl->phy_con0, CTRL_SHGATE);

                val = readl(&phy0_ctrl->phy_con1);
                val &= ~(CTRL_GATEDURADJ_MASK);
                writel(val, &phy0_ctrl->phy_con1);

                val = readl(&phy1_ctrl->phy_con1);
                val &= ~(CTRL_GATEDURADJ_MASK);
                writel(val, &phy1_ctrl->phy_con1);

                writel(CTRL_RDLVL_GATE_ENABLE, &drex0->rdlvl_config);
                i = TIMEOUT_US;
                while (((readl(&drex0->phystatus) & RDLVL_COMPLETE_CHO) !=
                        RDLVL_COMPLETE_CHO) && (i > 0)) {
                        /*
                         * TODO(waihong): Comment on how long this takes to
                         * time out
                         */
                        sdelay(100);
                        i--;
                }
                if (!i)
                        return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
                writel(CTRL_RDLVL_GATE_DISABLE, &drex0->rdlvl_config);

                writel(CTRL_RDLVL_GATE_ENABLE, &drex1->rdlvl_config);
                i = TIMEOUT_US;
                while (((readl(&drex1->phystatus) & RDLVL_COMPLETE_CHO) !=
                        RDLVL_COMPLETE_CHO) && (i > 0)) {
                        /*
                         * TODO(waihong): Comment on how long this takes to
                         * time out
                         */
                        sdelay(100);
                        i--;
                }
                if (!i)
                        return SETUP_ERR_RDLV_COMPLETE_TIMEOUT;
                writel(CTRL_RDLVL_GATE_DISABLE, &drex1->rdlvl_config);

                writel(0, &phy0_ctrl->phy_con14);
                writel(0, &phy1_ctrl->phy_con14);

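                /* MRS to MR3 with A2 cleared: take the DRAMs back out of MPR mode */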
                val = (0x3 << DIRECT_CMD_BANK_SHIFT);
                for (chip = 0; chip < mem->chips_to_configure; chip++) {
                        writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
                               &drex0->directcmd);
                        writel(val | (chip << DIRECT_CMD_CHIP_SHIFT),
                               &drex1->directcmd);
                }

                /* Common Settings for Leveling */
                val = PHY_CON12_RESET_VAL;
                writel((val + n_lock_w_phy0), &phy0_ctrl->phy_con12);
                writel((val + n_lock_w_phy1), &phy1_ctrl->phy_con12);

                setbits_le32(&phy0_ctrl->phy_con2, DLL_DESKEW_EN);
                setbits_le32(&phy1_ctrl->phy_con2, DLL_DESKEW_EN);
        }

        /*
         * Do software read leveling
         *
         * Do this before we turn on auto refresh since the auto refresh can
         * be in conflict with the resync operation that's part of setting
         * read leveling.
         */
        if (!reset) {
                /* restore calibrated value after resume */
                dmc_set_read_offset_value(phy0_ctrl, readl(&pmu->pmu_spare1));
                dmc_set_read_offset_value(phy1_ctrl, readl(&pmu->pmu_spare2));
        } else {
                software_find_read_offset(phy0_ctrl, 0,
                                          CTRL_LOCK_COARSE(lock0_info));
                software_find_read_offset(phy1_ctrl, 1,
                                          CTRL_LOCK_COARSE(lock1_info));
                /* save calibrated value to restore after resume */
                writel(dmc_get_read_offset_value(phy0_ctrl), &pmu->pmu_spare1);
                writel(dmc_get_read_offset_value(phy1_ctrl), &pmu->pmu_spare2);
        }

        /* Send PALL command */
        dmc_config_prech(mem, &drex0->directcmd);
        dmc_config_prech(mem, &drex1->directcmd);

        writel(mem->memcontrol, &drex0->memcontrol);
        writel(mem->memcontrol, &drex1->memcontrol);

        /*
         * Set DMC Concontrol: enable the auto-refresh counter, set the
         * read data fetch cycles and enable DREX automatic power-down of
         * the I/O input buffers while no memory read is in progress.
         */
        writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
                (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT) |
                DMC_CONCONTROL_IO_PD_CON(0x2),
                &drex0->concontrol);
        writel(mem->concontrol | (mem->aref_en << CONCONTROL_AREF_EN_SHIFT) |
                (mem->rd_fetch << CONCONTROL_RD_FETCH_SHIFT) |
                DMC_CONCONTROL_IO_PD_CON(0x2),
                &drex1->concontrol);

        /*
         * Enable Clock Gating Control for the DMC.
         * This saves around 25 mW of DMC power compared to the power
         * consumption without these bits enabled.
         */
        setbits_le32(&drex0->cgcontrol, DMC_INTERNAL_CG);
        setbits_le32(&drex1->cgcontrol, DMC_INTERNAL_CG);

        /*
         * As per Exynos5800 UM ver 0.00 section 17.13.2.1
         * CONCONTROL register bit 3 [update_mode], Exynos5800 does not
         * support the PHY-initiated update, and it is recommended to set
         * this field to 1'b1 during initialization.
         *
         * When we apply PHY-initiated mode, the DLL lock value is determined
         * once at DMC init time and not updated later when we change the MIF
         * voltage based on the ASV group in the kernel.  Applying MC-initiated
         * mode makes sure that DLL tracing is ON so that the silicon is able
         * to compensate for the voltage variation.
         */
        val = readl(&drex0->concontrol);
        val |= CONCONTROL_UPDATE_MODE;
        writel(val, &drex0->concontrol);
        val = readl(&drex1->concontrol);
        val |= CONCONTROL_UPDATE_MODE;
        writel(val, &drex1->concontrol);

        return 0;
}
#endif