uboot/drivers/ddr/marvell/axp/ddr3_spd.c
   1// SPDX-License-Identifier: GPL-2.0
   2/*
   3 * Copyright (C) Marvell International Ltd. and its affiliates
   4 */
   5
   6#include <common.h>
   7#include <i2c.h>
   8#include <spl.h>
   9#include <asm/io.h>
  10#include <asm/arch/cpu.h>
  11#include <asm/arch/soc.h>
  12
  13#include "ddr3_init.h"
  14
  15#if defined(MV88F78X60)
  16#include "ddr3_axp_config.h"
  17#elif defined(MV88F67XX)
  18#include "ddr3_a370_config.h"
  19#endif
  20
  21#if defined(MV88F672X)
  22#include "ddr3_a375_config.h"
  23#endif
  24
  25#ifdef DUNIT_SPD
  26
  27/* DIMM SPD offsets */
  28#define SPD_DEV_TYPE_BYTE               2
  29
  30#define SPD_MODULE_TYPE_BYTE            3
  31#define SPD_MODULE_MASK                 0xf
  32#define SPD_MODULE_TYPE_RDIMM           1
  33#define SPD_MODULE_TYPE_UDIMM           2
  34
  35#define SPD_DEV_DENSITY_BYTE            4
  36#define SPD_DEV_DENSITY_MASK            0xf
  37
  38#define SPD_ROW_NUM_BYTE                5
  39#define SPD_ROW_NUM_MIN                 12
  40#define SPD_ROW_NUM_OFF                 3
  41#define SPD_ROW_NUM_MASK                (7 << SPD_ROW_NUM_OFF)
  42
  43#define SPD_COL_NUM_BYTE                5
  44#define SPD_COL_NUM_MIN                 9
  45#define SPD_COL_NUM_OFF                 0
  46#define SPD_COL_NUM_MASK                (7 << SPD_COL_NUM_OFF)
  47
  48#define SPD_MODULE_ORG_BYTE             7
  49#define SPD_MODULE_SDRAM_DEV_WIDTH_OFF  0
  50#define SPD_MODULE_SDRAM_DEV_WIDTH_MASK (7 << SPD_MODULE_SDRAM_DEV_WIDTH_OFF)
  51#define SPD_MODULE_BANK_NUM_MIN         1
  52#define SPD_MODULE_BANK_NUM_OFF         3
  53#define SPD_MODULE_BANK_NUM_MASK        (7 << SPD_MODULE_BANK_NUM_OFF)
  54
  55#define SPD_BUS_WIDTH_BYTE              8
  56#define SPD_BUS_WIDTH_OFF               0
  57#define SPD_BUS_WIDTH_MASK              (7 << SPD_BUS_WIDTH_OFF)
  58#define SPD_BUS_ECC_OFF                 3
  59#define SPD_BUS_ECC_MASK                (3 << SPD_BUS_ECC_OFF)
  60
  61#define SPD_MTB_DIVIDEND_BYTE           10
  62#define SPD_MTB_DIVISOR_BYTE            11
  63#define SPD_TCK_BYTE                    12
  64#define SPD_SUP_CAS_LAT_LSB_BYTE        14
  65#define SPD_SUP_CAS_LAT_MSB_BYTE        15
  66#define SPD_TAA_BYTE                    16
  67#define SPD_TWR_BYTE                    17
  68#define SPD_TRCD_BYTE                   18
  69#define SPD_TRRD_BYTE                   19
  70#define SPD_TRP_BYTE                    20
  71
  72#define SPD_TRAS_MSB_BYTE               21
  73#define SPD_TRAS_MSB_MASK               0xf
  74
  75#define SPD_TRC_MSB_BYTE                21
  76#define SPD_TRC_MSB_MASK                0xf0
  77
  78#define SPD_TRAS_LSB_BYTE               22
  79#define SPD_TRC_LSB_BYTE                23
  80#define SPD_TRFC_LSB_BYTE               24
  81#define SPD_TRFC_MSB_BYTE               25
  82#define SPD_TWTR_BYTE                   26
  83#define SPD_TRTP_BYTE                   27
  84
  85#define SPD_TFAW_MSB_BYTE               28
  86#define SPD_TFAW_MSB_MASK               0xf
  87
  88#define SPD_TFAW_LSB_BYTE               29
  89#define SPD_OPT_FEATURES_BYTE           30
  90#define SPD_THERMAL_REFRESH_OPT_BYTE    31
  91
  92#define SPD_ADDR_MAP_BYTE               63
  93#define SPD_ADDR_MAP_MIRROR_OFFS        0
  94
  95#define SPD_RDIMM_RC_BYTE               69
  96#define SPD_RDIMM_RC_NIBBLE_MASK        0xF
  97#define SPD_RDIMM_RC_NUM                16
  98
  99/* Dimm Memory Type values */
 100#define SPD_MEM_TYPE_SDRAM              0x4
 101#define SPD_MEM_TYPE_DDR1               0x7
 102#define SPD_MEM_TYPE_DDR2               0x8
 103#define SPD_MEM_TYPE_DDR3               0xB
 104
 105#define DIMM_MODULE_MANU_OFFS           64
 106#define DIMM_MODULE_MANU_SIZE           8
 107#define DIMM_MODULE_VEN_OFFS            73
 108#define DIMM_MODULE_VEN_SIZE            25
 109#define DIMM_MODULE_ID_OFFS             99
 110#define DIMM_MODULE_ID_SIZE             18
 111
 112/* enumeration for voltage levels. */
 113enum dimm_volt_if {
 114        TTL_5V_TOLERANT,
 115        LVTTL,
 116        HSTL_1_5V,
 117        SSTL_3_3V,
 118        SSTL_2_5V,
 119        VOLTAGE_UNKNOWN,
 120};
 121
  122/* enumeration for SDRAM CAS Latencies. */
 123enum dimm_sdram_cas {
 124        SD_CL_1 = 1,
 125        SD_CL_2,
 126        SD_CL_3,
 127        SD_CL_4,
 128        SD_CL_5,
 129        SD_CL_6,
 130        SD_CL_7,
 131        SD_FAULT
 132};
 133
 134/* enumeration for memory types */
 135enum memory_type {
 136        MEM_TYPE_SDRAM,
 137        MEM_TYPE_DDR1,
 138        MEM_TYPE_DDR2,
 139        MEM_TYPE_DDR3
 140};
 141
 142/* DIMM information structure */
 143typedef struct dimm_info {
 144        /* DIMM dimensions */
 145        u32 num_of_module_ranks;
 146        u32 data_width;
 147        u32 rank_capacity;
 148        u32 num_of_devices;
 149
 150        u32 sdram_width;
 151        u32 num_of_banks_on_each_device;
 152        u32 sdram_capacity;
 153
 154        u32 num_of_row_addr;
 155        u32 num_of_col_addr;
 156
 157        u32 addr_mirroring;
 158
 159        u32 err_check_type;                     /* ECC , PARITY.. */
 160        u32 type_info;                          /* DDR2 only */
 161
 162        /* DIMM timing parameters */
 163        u32 supported_cas_latencies;
 164        u32 refresh_interval;
 165        u32 min_cycle_time;
 166        u32 min_row_precharge_time;
 167        u32 min_row_active_to_row_active;
 168        u32 min_ras_to_cas_delay;
 169        u32 min_write_recovery_time;            /* DDR3/2 only */
 170        u32 min_write_to_read_cmd_delay;        /* DDR3/2 only */
 171        u32 min_read_to_prech_cmd_delay;        /* DDR3/2 only */
 172        u32 min_active_to_precharge;
 173        u32 min_refresh_recovery;               /* DDR3/2 only */
 174        u32 min_cas_lat_time;
 175        u32 min_four_active_win_delay;
 176        u8 dimm_rc[SPD_RDIMM_RC_NUM];
 177
 178        /* DIMM vendor ID */
 179        u32 vendor;
 180} MV_DIMM_INFO;
 181
 182static int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info,
 183                             u32 dimm);
 184static u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val);
 185static u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val);
 186static int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width);
 187static u32 ddr3_div(u32 val, u32 divider, u32 sub);
 188
 189extern u8 spd_data[SPD_SIZE];
 190extern u32 odt_config[ODT_OPT];
 191extern u16 odt_static[ODT_OPT][MAX_CS];
 192extern u16 odt_dynamic[ODT_OPT][MAX_CS];
 193
 194#if !(defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710))
 195/*
  196 * Name:     ddr3_get_dimm_num - Find the number of DIMMs and their addresses
  197 * Desc:     Scan the SPD EEPROM addresses for DDR3 DIMMs
  198 * Args:     dimm_addr - array filled with the detected DIMM addresses
  199 * Notes:
  200 * Returns:  Number of DIMMs detected.
 201 */
 202static u32 ddr3_get_dimm_num(u32 *dimm_addr)
 203{
 204        u32 dimm_cur_addr;
 205        u8 data[3];
 206        u32 dimm_num = 0;
 207        int ret;
 208
 209        /* Read the dimm eeprom */
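             /*
              * Scan the SPD addresses from high to low; if nothing is found at
              * or above FAR_END_DIMM_ADDR the scan is abandoned, since the
              * far-end DIMM slot must be populated.
              */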
 210        for (dimm_cur_addr = MAX_DIMM_ADDR; dimm_cur_addr > MIN_DIMM_ADDR;
 211             dimm_cur_addr--) {
 212                struct udevice *udev;
 213
 214                data[SPD_DEV_TYPE_BYTE] = 0;
 215
 216                /* Far-End DIMM must be connected */
 217                if ((dimm_num == 0) && (dimm_cur_addr < FAR_END_DIMM_ADDR))
 218                        return 0;
 219
 220                ret = i2c_get_chip_for_busnum(0, dimm_cur_addr, 1, &udev);
 221                if (ret)
 222                        continue;
 223
 224                ret = dm_i2c_read(udev, 0, data, 3);
 225                if (!ret) {
 226                        if (data[SPD_DEV_TYPE_BYTE] == SPD_MEM_TYPE_DDR3) {
 227                                dimm_addr[dimm_num] = dimm_cur_addr;
 228                                dimm_num++;
 229                        }
 230                }
 231        }
 232
 233        return dimm_num;
 234}
 235#endif
 236
 237/*
  238 * Name:     ddr3_spd_init - Get the SPD parameters.
  239 * Desc:     Read the DIMM SPD parameters into the given info structure.
  240 * Args:     info - DIMM information structure.
  241 *           dimm_addr - SPD EEPROM address (0 = use pre-loaded spd_data).
  242 *           dimm_width - DDR bus width in bits.
  243 * Returns:  MV_OK if the DIMM parameters could be read, an error code otherwise.
 244 */
 245int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width)
 246{
 247        u32 tmp;
 248        u32 time_base;
 249        int ret;
 250        __maybe_unused u32 rc;
 251        __maybe_unused u8 vendor_high, vendor_low;
 252
 253        if (dimm_addr != 0) {
 254                struct udevice *udev;
 255
 256                memset(spd_data, 0, SPD_SIZE * sizeof(u8));
 257
 258                ret = i2c_get_chip_for_busnum(0, dimm_addr, 1, &udev);
 259                if (ret)
 260                        return MV_DDR3_TRAINING_ERR_TWSI_FAIL;
 261
 262                ret = dm_i2c_read(udev, 0, spd_data, SPD_SIZE);
 263                if (ret)
 264                        return MV_DDR3_TRAINING_ERR_TWSI_FAIL;
 265        }
 266
 267        /* Check if DDR3 */
 268        if (spd_data[SPD_DEV_TYPE_BYTE] != SPD_MEM_TYPE_DDR3)
 269                return MV_DDR3_TRAINING_ERR_TWSI_BAD_TYPE;
 270
 271        /* Error Check Type */
 272        /* No byte for error check in DDR3 SPD, use DDR2 convention */
 273        info->err_check_type = 0;
 274
 275        /* Check if ECC */
 276        if ((spd_data[SPD_BUS_WIDTH_BYTE] & 0x18) >> 3)
 277                info->err_check_type = 1;
 278
 279        DEBUG_INIT_FULL_C("DRAM err_check_type ", info->err_check_type, 1);
 280        switch (spd_data[SPD_MODULE_TYPE_BYTE]) {
 281        case 1:
 282                /* support RDIMM */
 283                info->type_info = SPD_MODULE_TYPE_RDIMM;
 284                break;
 285        case 2:
 286                /* support UDIMM */
 287                info->type_info = SPD_MODULE_TYPE_UDIMM;
 288                break;
  289        case 11:                /* LRDIMM currently not supported */
 290        default:
 291                info->type_info = (spd_data[SPD_MODULE_TYPE_BYTE]);
 292                break;
 293        }
 294
 295        /* Size Calculations: */
 296
 297        /* Number Of Row Addresses - 12/13/14/15/16 */
 298        info->num_of_row_addr =
 299                (spd_data[SPD_ROW_NUM_BYTE] & SPD_ROW_NUM_MASK) >>
 300                SPD_ROW_NUM_OFF;
 301        info->num_of_row_addr += SPD_ROW_NUM_MIN;
 302        DEBUG_INIT_FULL_C("DRAM num_of_row_addr ", info->num_of_row_addr, 2);
 303
 304        /* Number Of Column Addresses - 9/10/11/12 */
 305        info->num_of_col_addr =
 306                (spd_data[SPD_COL_NUM_BYTE] & SPD_COL_NUM_MASK) >>
 307                SPD_COL_NUM_OFF;
 308        info->num_of_col_addr += SPD_COL_NUM_MIN;
 309        DEBUG_INIT_FULL_C("DRAM num_of_col_addr ", info->num_of_col_addr, 1);
 310
 311        /* Number Of Ranks = number of CS on Dimm - 1/2/3/4 Ranks */
 312        info->num_of_module_ranks =
 313                (spd_data[SPD_MODULE_ORG_BYTE] & SPD_MODULE_BANK_NUM_MASK) >>
 314                SPD_MODULE_BANK_NUM_OFF;
 315        info->num_of_module_ranks += SPD_MODULE_BANK_NUM_MIN;
 316        DEBUG_INIT_FULL_C("DRAM numOfModuleBanks ", info->num_of_module_ranks,
 317                          1);
 318
 319        /* Data Width - 8/16/32/64 bits */
 320        info->data_width =
 321                1 << (3 + (spd_data[SPD_BUS_WIDTH_BYTE] & SPD_BUS_WIDTH_MASK));
 322        DEBUG_INIT_FULL_C("DRAM data_width ", info->data_width, 1);
 323
 324        /* Number Of Banks On Each Device - 8/16/32/64 banks */
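             /*
              * Per the JEDEC DDR3 SPD layout, byte 4 bits [6:4] hold the bank
              * address bit count minus three, so the common value of 0 maps to
              * 8 banks.
              */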
 325        info->num_of_banks_on_each_device =
 326                1 << (3 + ((spd_data[SPD_DEV_DENSITY_BYTE] >> 4) & 0x7));
 327        DEBUG_INIT_FULL_C("DRAM num_of_banks_on_each_device ",
 328                          info->num_of_banks_on_each_device, 1);
 329
 330        /* Total SDRAM capacity - 256Mb/512Mb/1Gb/2Gb/4Gb/8Gb/16Gb - MegaBits */
 331        info->sdram_capacity =
 332                spd_data[SPD_DEV_DENSITY_BYTE] & SPD_DEV_DENSITY_MASK;
 333
 334        /* Sdram Width - 4/8/16/32 bits */
 335        info->sdram_width = 1 << (2 + (spd_data[SPD_MODULE_ORG_BYTE] &
 336                                       SPD_MODULE_SDRAM_DEV_WIDTH_MASK));
 337        DEBUG_INIT_FULL_C("DRAM sdram_width ", info->sdram_width, 1);
 338
  339        /* CS (Rank) Capacity */
  340        /*
  341         * DDR3 device density values are: (device capacity / 8) *
  342         * (module_width / device_width)
  343         */
  344        /* JEDEC SPD DDR3 - page 7, Save spd_data in Mb  - 2048=2GB */
 345        if (dimm_width == 32) {
 346                info->rank_capacity =
 347                        ((1 << info->sdram_capacity) * 256 *
 348                         (info->data_width / info->sdram_width)) << 16;
 349                /* CS size = CS size / 2  */
 350        } else {
 351                info->rank_capacity =
 352                        ((1 << info->sdram_capacity) * 256 *
 353                         (info->data_width / info->sdram_width) * 0x2) << 16;
 354                /* 0x2 =>  0x100000-1Mbit / 8-bit->byte / 0x10000  */
 355        }
 356        DEBUG_INIT_FULL_C("DRAM rank_capacity[31] ", info->rank_capacity, 1);
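             /*
              * Worked example (64-bit module of x8, 2Gbit devices, density
              * code 3): ((1 << 3) * 256 * (64 / 8) * 2) << 16 = 0x80000000,
              * i.e. a 2 GB rank; this value (minus one) is later written to
              * the per-CS size scratch register.
              */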
 357
  358        /* Number of devices including Error correction */
 359        info->num_of_devices =
 360                ((info->data_width / info->sdram_width) *
 361                 info->num_of_module_ranks) + info->err_check_type;
 362        DEBUG_INIT_FULL_C("DRAM num_of_devices  ", info->num_of_devices, 1);
 363
 364        /* Address Mapping from Edge connector to DRAM - mirroring option */
 365        info->addr_mirroring =
 366                spd_data[SPD_ADDR_MAP_BYTE] & (1 << SPD_ADDR_MAP_MIRROR_OFFS);
 367
 368        /* Timings - All in ps */
 369
 370        time_base = (1000 * spd_data[SPD_MTB_DIVIDEND_BYTE]) /
 371                spd_data[SPD_MTB_DIVISOR_BYTE];
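             /*
              * SPD bytes 10/11 give the Medium Timebase as a fraction of a ns;
              * the factor of 1000 converts it to ps. With the usual DDR3 MTB
              * of 1/8 ns this yields time_base = 125, so a tCKmin of 10 MTB
              * units becomes 1250 ps (DDR3-1600).
              */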
 372
  373        /* Minimum SDRAM cycle time (tCKmin) */
 374        info->min_cycle_time = spd_data[SPD_TCK_BYTE] * time_base;
 375        DEBUG_INIT_FULL_C("DRAM tCKmin ", info->min_cycle_time, 1);
 376
 377        /* Refresh Interval */
 378        /* No byte for refresh interval in DDR3 SPD, use DDR2 convention */
 379        /*
  380         * JEDEC parameters: 7.8us for 0 <= Tcase <= 85C,
  381         * 3.9us for 85 < Tcase <= 95C
 382         */
 383        info->refresh_interval = 7800000;       /* Set to 7.8uSec */
 384        DEBUG_INIT_FULL_C("DRAM refresh_interval ", info->refresh_interval, 1);
 385
  386        /* Supported CAS latencies - DDR3: */
 387
 388        /*
 389         *         bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0 *
 390         *******-******-******-******-******-******-******-*******-*******
 391         CAS =      11  |  10  |  9   |  8   |  7   |  6   |  5   |  4   *
 392         *********************************************************-*******
 393         *******-******-******-******-******-******-******-*******-*******
 394         *        bit15 |bit14 |bit13 |bit12 |bit11 |bit10 | bit9 | bit8 *
 395         *******-******-******-******-******-******-******-*******-*******
 396         CAS =     TBD  |  18  |  17  |  16  |  15  |  14  |  13  |  12  *
 397        */
 398
  399        /* DDR3 SPD includes two bytes of supported CAS latencies */
 400        info->supported_cas_latencies =
 401                (spd_data[SPD_SUP_CAS_LAT_MSB_BYTE] << 8) |
 402                spd_data[SPD_SUP_CAS_LAT_LSB_BYTE];
 403        DEBUG_INIT_FULL_C("DRAM supported_cas_latencies ",
 404                          info->supported_cas_latencies, 1);
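             /*
              * Bit n of this mask means CAS latency (n + 4) is supported, so a
              * value of 0x00fe, for example, advertises CL 5 through CL 11.
              */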
 405
  406        /* Minimum CAS latency time (tAAmin) */
 407        info->min_cas_lat_time = (spd_data[SPD_TAA_BYTE] * time_base);
 408        /*
  409         * This field divided by the cycle time gives the CAS latency
  410         * to configure
 411         */
 412
 413        /*
  414         * DDR3 and DDR2 SPD include a Write Recovery Time field;
  415         * other SDRAM types ignore it
 416         */
 417        info->min_write_recovery_time = spd_data[SPD_TWR_BYTE] * time_base;
 418        DEBUG_INIT_FULL_C("DRAM min_write_recovery_time ",
 419                          info->min_write_recovery_time, 1);
 420
  421        /* Minimum RAS to CAS delay (tRCDmin) */
 422        info->min_ras_to_cas_delay = spd_data[SPD_TRCD_BYTE] * time_base;
 423        DEBUG_INIT_FULL_C("DRAM min_ras_to_cas_delay ",
 424                          info->min_ras_to_cas_delay, 1);
 425
 426        /* Minimum Row Active to Row Active Time */
 427        info->min_row_active_to_row_active =
 428            spd_data[SPD_TRRD_BYTE] * time_base;
 429        DEBUG_INIT_FULL_C("DRAM min_row_active_to_row_active ",
 430                          info->min_row_active_to_row_active, 1);
 431
 432        /* Minimum Row Precharge Delay Time */
 433        info->min_row_precharge_time = spd_data[SPD_TRP_BYTE] * time_base;
 434        DEBUG_INIT_FULL_C("DRAM min_row_precharge_time ",
 435                          info->min_row_precharge_time, 1);
 436
 437        /* Minimum Active to Precharge Delay Time - tRAS   ps */
 438        info->min_active_to_precharge =
 439                (spd_data[SPD_TRAS_MSB_BYTE] & SPD_TRAS_MSB_MASK) << 8;
 440        info->min_active_to_precharge |= spd_data[SPD_TRAS_LSB_BYTE];
 441        info->min_active_to_precharge *= time_base;
 442        DEBUG_INIT_FULL_C("DRAM min_active_to_precharge ",
 443                          info->min_active_to_precharge, 1);
 444
 445        /* Minimum Refresh Recovery Delay Time - tRFC  ps */
 446        info->min_refresh_recovery = spd_data[SPD_TRFC_MSB_BYTE] << 8;
 447        info->min_refresh_recovery |= spd_data[SPD_TRFC_LSB_BYTE];
 448        info->min_refresh_recovery *= time_base;
 449        DEBUG_INIT_FULL_C("DRAM min_refresh_recovery ",
 450                          info->min_refresh_recovery, 1);
 451
 452        /*
  453         * DDR3 and DDR2 SPD include the Internal Write to Read Command
  454         * Delay (tWTRmin) field.
 455         */
 456        info->min_write_to_read_cmd_delay = spd_data[SPD_TWTR_BYTE] * time_base;
 457        DEBUG_INIT_FULL_C("DRAM min_write_to_read_cmd_delay ",
 458                          info->min_write_to_read_cmd_delay, 1);
 459
 460        /*
  461         * DDR3 and DDR2 SPD include the Internal Read to Precharge
  462         * Command Delay (tRTPmin) field.
 463         */
 464        info->min_read_to_prech_cmd_delay = spd_data[SPD_TRTP_BYTE] * time_base;
 465        DEBUG_INIT_FULL_C("DRAM min_read_to_prech_cmd_delay ",
 466                          info->min_read_to_prech_cmd_delay, 1);
 467
 468        /*
  469         * DDR3 SPD includes the Minimum Four Activate Window Delay
  470         * (tFAWmin) field
 471         */
 472        tmp = ((spd_data[SPD_TFAW_MSB_BYTE] & SPD_TFAW_MSB_MASK) << 8) |
 473                spd_data[SPD_TFAW_LSB_BYTE];
 474        info->min_four_active_win_delay = tmp * time_base;
 475        DEBUG_INIT_FULL_C("DRAM min_four_active_win_delay ",
 476                          info->min_four_active_win_delay, 1);
 477
 478#if defined(MV88F78X60) || defined(MV88F672X)
 479        /* Registered DIMM support */
 480        if (info->type_info == SPD_MODULE_TYPE_RDIMM) {
 481                for (rc = 2; rc < 6; rc += 2) {
 482                        tmp = spd_data[SPD_RDIMM_RC_BYTE + rc / 2];
 483                        info->dimm_rc[rc] =
 484                                spd_data[SPD_RDIMM_RC_BYTE + rc / 2] &
 485                                SPD_RDIMM_RC_NIBBLE_MASK;
 486                        info->dimm_rc[rc + 1] =
 487                                (spd_data[SPD_RDIMM_RC_BYTE + rc / 2] >> 4) &
 488                                SPD_RDIMM_RC_NIBBLE_MASK;
 489                }
 490
 491                vendor_low = spd_data[66];
 492                vendor_high = spd_data[65];
 493                info->vendor = (vendor_high << 8) + vendor_low;
 494                DEBUG_INIT_C("DDR3 Training Sequence - Registered DIMM vendor ID 0x",
 495                             info->vendor, 4);
 496
 497                info->dimm_rc[0] = RDIMM_RC0;
 498                info->dimm_rc[1] = RDIMM_RC1;
 499                info->dimm_rc[2] = RDIMM_RC2;
 500                info->dimm_rc[8] = RDIMM_RC8;
 501                info->dimm_rc[9] = RDIMM_RC9;
 502                info->dimm_rc[10] = RDIMM_RC10;
 503                info->dimm_rc[11] = RDIMM_RC11;
 504        }
 505#endif
 506
 507        return MV_OK;
 508}
 509
 510/*
  511 * Name:     ddr3_spd_sum_init - Merge SPD parameters of multiple DIMMs.
  512 * Desc:     Fold the given DIMM's parameters into the worst-case summary
  513 *           structure used to program the controller.
  514 * Args:     info - DIMM information structure of the current DIMM.
  515 *           sum_info - summary structure; dimm - DIMM index (0 = first).
  516 * Returns:  MV_OK on success, an error code if the DIMMs are incompatible.
 517 */
 518int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info, u32 dimm)
 519{
 520        if (dimm == 0) {
 521                memcpy(sum_info, info, sizeof(MV_DIMM_INFO));
 522                return MV_OK;
 523        }
 524        if (sum_info->type_info != info->type_info) {
 525                DEBUG_INIT_S("DDR3 Dimm Compare - DIMM type does not match - FAIL\n");
 526                return MV_DDR3_TRAINING_ERR_DIMM_TYPE_NO_MATCH;
 527        }
 528        if (sum_info->err_check_type > info->err_check_type) {
 529                sum_info->err_check_type = info->err_check_type;
 530                DEBUG_INIT_S("DDR3 Dimm Compare - ECC does not match. ECC is disabled\n");
 531        }
 532        if (sum_info->data_width != info->data_width) {
 533                DEBUG_INIT_S("DDR3 Dimm Compare - DRAM bus width does not match - FAIL\n");
 534                return MV_DDR3_TRAINING_ERR_BUS_WIDTH_NOT_MATCH;
 535        }
 536        if (sum_info->min_cycle_time < info->min_cycle_time)
 537                sum_info->min_cycle_time = info->min_cycle_time;
 538        if (sum_info->refresh_interval < info->refresh_interval)
 539                sum_info->refresh_interval = info->refresh_interval;
 540        sum_info->supported_cas_latencies &= info->supported_cas_latencies;
 541        if (sum_info->min_cas_lat_time < info->min_cas_lat_time)
 542                sum_info->min_cas_lat_time = info->min_cas_lat_time;
 543        if (sum_info->min_write_recovery_time < info->min_write_recovery_time)
 544                sum_info->min_write_recovery_time =
 545                    info->min_write_recovery_time;
 546        if (sum_info->min_ras_to_cas_delay < info->min_ras_to_cas_delay)
 547                sum_info->min_ras_to_cas_delay = info->min_ras_to_cas_delay;
 548        if (sum_info->min_row_active_to_row_active <
 549            info->min_row_active_to_row_active)
 550                sum_info->min_row_active_to_row_active =
 551                    info->min_row_active_to_row_active;
 552        if (sum_info->min_row_precharge_time < info->min_row_precharge_time)
 553                sum_info->min_row_precharge_time = info->min_row_precharge_time;
 554        if (sum_info->min_active_to_precharge < info->min_active_to_precharge)
 555                sum_info->min_active_to_precharge =
 556                    info->min_active_to_precharge;
 557        if (sum_info->min_refresh_recovery < info->min_refresh_recovery)
 558                sum_info->min_refresh_recovery = info->min_refresh_recovery;
 559        if (sum_info->min_write_to_read_cmd_delay <
 560            info->min_write_to_read_cmd_delay)
 561                sum_info->min_write_to_read_cmd_delay =
 562                    info->min_write_to_read_cmd_delay;
 563        if (sum_info->min_read_to_prech_cmd_delay <
 564            info->min_read_to_prech_cmd_delay)
 565                sum_info->min_read_to_prech_cmd_delay =
 566                    info->min_read_to_prech_cmd_delay;
 567        if (sum_info->min_four_active_win_delay <
 568            info->min_four_active_win_delay)
 569                sum_info->min_four_active_win_delay =
 570                    info->min_four_active_win_delay;
 571        if (sum_info->min_write_to_read_cmd_delay <
 572            info->min_write_to_read_cmd_delay)
 573                sum_info->min_write_to_read_cmd_delay =
 574                        info->min_write_to_read_cmd_delay;
 575
 576        return MV_OK;
 577}
 578
 579/*
 580 * Name:     ddr3_dunit_setup
 581 * Desc:     Set the controller with the timing values.
  582 * Args:     ecc_ena - user ECC setup, hclk_time - HCLK period in ps,
  583 *           ddr_width - pointer to the DDR bus width
  584 * Returns:  MV_OK on success, an error code otherwise.
 585 */
 586int ddr3_dunit_setup(u32 ecc_ena, u32 hclk_time, u32 *ddr_width)
 587{
 588        u32 reg, tmp, cwl;
 589        u32 ddr_clk_time;
 590        MV_DIMM_INFO dimm_info[2];
 591        MV_DIMM_INFO sum_info;
 592        u32 stat_val, spd_val;
 593        u32 cs, cl, cs_num, cs_ena;
 594        u32 dimm_num = 0;
 595        int status;
 596        u32 rc;
 597        __maybe_unused u32 dimm_cnt, cs_count, dimm;
 598        __maybe_unused u32 dimm_addr[2] = { 0, 0 };
 599
 600#if defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710)
 601        /* Armada 370 - SPD is not available on DIMM */
 602        /*
 603         * Set MC registers according to Static SPD values Values -
 604         * must be set manually
 605         */
 606        /*
 607         * We only have one optional DIMM for the DB and we already got the
 608         * SPD matching values
 609         */
 610        status = ddr3_spd_init(&dimm_info[0], 0, *ddr_width);
 611        if (MV_OK != status)
 612                return status;
 613
 614        dimm_num = 1;
 615        /* Use JP8 to enable multiCS support for Armada 370 DB */
 616        if (!ddr3_check_config(EEPROM_MODULE_ADDR, CONFIG_MULTI_CS))
 617                dimm_info[0].num_of_module_ranks = 1;
 618        status = ddr3_spd_sum_init(&dimm_info[0], &sum_info, 0);
 619        if (MV_OK != status)
 620                return status;
 621#else
 622        /* Dynamic D-Unit Setup - Read SPD values */
 623#ifdef DUNIT_SPD
 624        dimm_num = ddr3_get_dimm_num(dimm_addr);
 625        if (dimm_num == 0) {
 626#ifdef MIXED_DIMM_STATIC
 627                DEBUG_INIT_S("DDR3 Training Sequence - No DIMMs detected\n");
 628#else
 629                DEBUG_INIT_S("DDR3 Training Sequence - FAILED (Wrong DIMMs Setup)\n");
 630                return MV_DDR3_TRAINING_ERR_BAD_DIMM_SETUP;
 631#endif
 632        } else {
 633                DEBUG_INIT_C("DDR3 Training Sequence - Number of DIMMs detected: ",
 634                             dimm_num, 1);
 635        }
 636
 637        for (dimm = 0; dimm < dimm_num; dimm++) {
 638                status = ddr3_spd_init(&dimm_info[dimm], dimm_addr[dimm],
 639                                       *ddr_width);
 640                if (MV_OK != status)
 641                        return status;
 642                status = ddr3_spd_sum_init(&dimm_info[dimm], &sum_info, dimm);
 643                if (MV_OK != status)
 644                        return status;
 645        }
 646#endif
 647#endif
 648
 649        /* Set number of enabled CS */
 650        cs_num = 0;
 651#ifdef DUNIT_STATIC
 652        cs_num = ddr3_get_cs_num_from_reg();
 653#endif
 654#ifdef DUNIT_SPD
 655        for (dimm = 0; dimm < dimm_num; dimm++)
 656                cs_num += dimm_info[dimm].num_of_module_ranks;
 657#endif
 658        if (cs_num > MAX_CS) {
  659                DEBUG_INIT_C("DDR3 Training Sequence - Number of CS exceeds limit -  ",
 660                             MAX_CS, 1);
 661                return MV_DDR3_TRAINING_ERR_MAX_CS_LIMIT;
 662        }
 663
 664        /* Set bitmap of enabled CS */
 665        cs_ena = 0;
 666#ifdef DUNIT_STATIC
 667        cs_ena = ddr3_get_cs_ena_from_reg();
 668#endif
 669#ifdef DUNIT_SPD
 670        dimm = 0;
 671
 672        if (dimm_num) {
 673                for (cs = 0; cs < MAX_CS; cs += 2) {
 674                        if (((1 << cs) & DIMM_CS_BITMAP) &&
 675                            !(cs_ena & (1 << cs))) {
 676                                if (dimm_info[dimm].num_of_module_ranks == 1)
 677                                        cs_ena |= (0x1 << cs);
 678                                else if (dimm_info[dimm].num_of_module_ranks == 2)
 679                                        cs_ena |= (0x3 << cs);
 680                                else if (dimm_info[dimm].num_of_module_ranks == 3)
 681                                        cs_ena |= (0x7 << cs);
 682                                else if (dimm_info[dimm].num_of_module_ranks == 4)
 683                                        cs_ena |= (0xF << cs);
 684
 685                                dimm++;
 686                                if (dimm == dimm_num)
 687                                        break;
 688                        }
 689                }
 690        }
 691#endif
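             /*
              * Each detected DIMM is mapped onto an even chip-select pair
              * (CS0/1, CS2/3), so cs_ena ends up as a bitmap of the ranks to
              * enable.
              */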
 692
 693        if (cs_ena > 0xF) {
  694                DEBUG_INIT_C("DDR3 Training Sequence - Number of enabled CS exceeds limit -  ",
 695                             MAX_CS, 1);
 696                return MV_DDR3_TRAINING_ERR_MAX_ENA_CS_LIMIT;
 697        }
 698
 699        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Number of CS = ", cs_num, 1);
 700
 701        /* Check Ratio - '1' - 2:1, '0' - 1:1 */
 702        if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
 703                ddr_clk_time = hclk_time / 2;
 704        else
 705                ddr_clk_time = hclk_time;
 706
 707#ifdef DUNIT_STATIC
 708        /* Get target CL value from set register */
 709        reg = (reg_read(REG_DDR3_MR0_ADDR) >> 2);
 710        reg = ((((reg >> 1) & 0xE)) | (reg & 0x1)) & 0xF;
 711
 712        cl = ddr3_get_max_val(ddr3_div(sum_info.min_cas_lat_time,
 713                                       ddr_clk_time, 0),
 714                              dimm_num, ddr3_valid_cl_to_cl(reg));
 715#else
 716        cl = ddr3_div(sum_info.min_cas_lat_time, ddr_clk_time, 0);
 717#endif
 718        if (cl < 5)
 719                cl = 5;
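             /* CL 5 is the lowest CAS latency defined for DDR3 */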
 720
 721        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Cas Latency = ", cl, 1);
 722
 723        /* {0x00001400} -   DDR SDRAM Configuration Register */
 724        reg = 0x73004000;
 725        stat_val = ddr3_get_static_mc_value(
 726                REG_SDRAM_CONFIG_ADDR, REG_SDRAM_CONFIG_ECC_OFFS, 0x1, 0, 0);
 727        if (ecc_ena && ddr3_get_min_val(sum_info.err_check_type, dimm_num,
 728                                        stat_val)) {
 729                reg |= (1 << REG_SDRAM_CONFIG_ECC_OFFS);
 730                reg |= (1 << REG_SDRAM_CONFIG_IERR_OFFS);
 731                DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Enabled\n");
 732        } else {
 733                DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Disabled\n");
 734        }
 735
 736        if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
 737#ifdef DUNIT_STATIC
 738                DEBUG_INIT_S("DDR3 Training Sequence - FAIL - Illegal R-DIMM setup\n");
 739                return MV_DDR3_TRAINING_ERR_BAD_R_DIMM_SETUP;
 740#endif
 741                reg |= (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS);
 742                DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - R-DIMM\n");
 743        } else {
 744                DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - U-DIMM\n");
 745        }
 746
 747#ifndef MV88F67XX
 748#ifdef DUNIT_STATIC
 749        if (ddr3_get_min_val(sum_info.data_width, dimm_num, BUS_WIDTH) == 64) {
 750#else
 751        if (*ddr_width == 64) {
 752#endif
 753                reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
 754                DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 64Bits\n");
 755        } else {
 756                DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
 757        }
 758#else
 759        DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
 760#endif
 761
 762#if defined(MV88F672X)
 763        if (*ddr_width == 32) {
 764                reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
 765                DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
 766        } else {
 767                DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
 768        }
 769#endif
 770        stat_val = ddr3_get_static_mc_value(REG_SDRAM_CONFIG_ADDR, 0,
 771                                               REG_SDRAM_CONFIG_RFRS_MASK, 0, 0);
 772        tmp = ddr3_get_min_val(sum_info.refresh_interval / hclk_time,
 773                               dimm_num, stat_val);
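             /*
              * The refresh field is tREFI in hclk cycles (hclk_time is in ps,
              * like the 7800000 ps tREFI above); e.g. a 5 ns (200 MHz) hclk
              * gives 7800000 / 5000 = 1560 cycles.
              */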
 774
 775#ifdef TREFI_USER_EN
 776        tmp = min(TREFI_USER / hclk_time, tmp);
 777#endif
 778
 779        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - RefreshInterval/Hclk = ", tmp, 4);
 780        reg |= tmp;
 781
 782        if (cl != 3)
 783                reg |= (1 << 16);       /*  If 2:1 need to set P2DWr */
 784
 785#if defined(MV88F672X)
 786        reg |= (1 << 27);       /* PhyRfRST = Disable */
 787#endif
 788        reg_write(REG_SDRAM_CONFIG_ADDR, reg);
 789
  790        /*{0x00001404}  -   DDR Controller Control (Low) Register */
 791        reg = 0x3630B800;
 792#ifdef DUNIT_SPD
 793        reg |= (DRAM_2T << REG_DUNIT_CTRL_LOW_2T_OFFS);
 794#endif
 795        reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
 796
 797        /* {0x00001408}  -   DDR SDRAM Timing (Low) Register */
 798        reg = 0x0;
 799
 800        /* tRAS - (0:3,20) */
 801        spd_val = ddr3_div(sum_info.min_active_to_precharge,
 802                            ddr_clk_time, 1);
 803        stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
 804                                            0, 0xF, 16, 0x10);
 805        tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
 806        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRAS-1 = ", tmp, 1);
 807        reg |= (tmp & 0xF);
 808        reg |= ((tmp & 0x10) << 16);    /* to bit 20 */
 809
 810        /* tRCD - (4:7) */
 811        spd_val = ddr3_div(sum_info.min_ras_to_cas_delay, ddr_clk_time, 1);
 812        stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
 813                                            4, 0xF, 0, 0);
 814        tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
 815        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRCD-1 = ", tmp, 1);
 816        reg |= ((tmp & 0xF) << 4);
 817
 818        /* tRP - (8:11) */
 819        spd_val = ddr3_div(sum_info.min_row_precharge_time, ddr_clk_time, 1);
 820        stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
 821                                            8, 0xF, 0, 0);
 822        tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
 823        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRP-1 = ", tmp, 1);
 824        reg |= ((tmp & 0xF) << 8);
 825
 826        /* tWR - (12:15) */
 827        spd_val = ddr3_div(sum_info.min_write_recovery_time, ddr_clk_time, 1);
 828        stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
 829                                            12, 0xF, 0, 0);
 830        tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
 831        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWR-1 = ", tmp, 1);
 832        reg |= ((tmp & 0xF) << 12);
 833
 834        /* tWTR - (16:19) */
 835        spd_val = ddr3_div(sum_info.min_write_to_read_cmd_delay, ddr_clk_time, 1);
 836        stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
 837                                            16, 0xF, 0, 0);
 838        tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
 839        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWTR-1 = ", tmp, 1);
 840        reg |= ((tmp & 0xF) << 16);
 841
 842        /* tRRD - (24:27) */
 843        spd_val = ddr3_div(sum_info.min_row_active_to_row_active, ddr_clk_time, 1);
 844        stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
 845                                            24, 0xF, 0, 0);
 846        tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
 847        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRRD-1 = ", tmp, 1);
 848        reg |= ((tmp & 0xF) << 24);
 849
 850        /* tRTP - (28:31) */
 851        spd_val = ddr3_div(sum_info.min_read_to_prech_cmd_delay, ddr_clk_time, 1);
 852        stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
 853                                            28, 0xF, 0, 0);
 854        tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
 855        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRTP-1 = ", tmp, 1);
 856        reg |= ((tmp & 0xF) << 28);
 857
 858        if (cl < 7)
 859                reg = 0x33137663;
 860
 861        reg_write(REG_SDRAM_TIMING_LOW_ADDR, reg);
 862
 863        /*{0x0000140C}  -   DDR SDRAM Timing (High) Register */
 864        /* Add cycles to R2R W2W */
 865        reg = 0x39F8FF80;
 866
 867        /* tRFC - (0:6,16:18) */
 868        spd_val = ddr3_div(sum_info.min_refresh_recovery, ddr_clk_time, 1);
 869        stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_HIGH_ADDR,
 870                                            0, 0x7F, 9, 0x380);
 871        tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
 872        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRFC-1 = ", tmp, 1);
 873        reg |= (tmp & 0x7F);
 874        reg |= ((tmp & 0x380) << 9);    /* to bit 16 */
 875        reg_write(REG_SDRAM_TIMING_HIGH_ADDR, reg);
 876
 877        /*{0x00001410}  -   DDR SDRAM Address Control Register */
 878        reg = 0x000F0000;
 879
 880        /* tFAW - (24:28)  */
 881#if (defined(MV88F78X60) || defined(MV88F672X))
 882        tmp = sum_info.min_four_active_win_delay;
 883        spd_val = ddr3_div(tmp, ddr_clk_time, 0);
 884        stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
 885                                            24, 0x3F, 0, 0);
 886        tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
 887        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW = ", tmp, 1);
 888        reg |= ((tmp & 0x3F) << 24);
 889#else
 890        tmp = sum_info.min_four_active_win_delay -
 891                4 * (sum_info.min_row_active_to_row_active);
 892        spd_val = ddr3_div(tmp, ddr_clk_time, 0);
 893        stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
 894                                            24, 0x1F, 0, 0);
 895        tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
 896        DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW-4*tRRD = ", tmp, 1);
 897        reg |= ((tmp & 0x1F) << 24);
 898#endif
 899
 900        /* SDRAM device capacity */
 901#ifdef DUNIT_STATIC
 902        reg |= (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) & 0xF0FFFF);
 903#endif
 904
 905#ifdef DUNIT_SPD
 906        cs_count = 0;
 907        dimm_cnt = 0;
 908        for (cs = 0; cs < MAX_CS; cs++) {
 909                if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
 910                        if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
 911                                dimm_cnt++;
 912                                cs_count = 0;
 913                        }
 914                        cs_count++;
 915                        if (dimm_info[dimm_cnt].sdram_capacity < 0x3) {
 916                                reg |= ((dimm_info[dimm_cnt].sdram_capacity + 1) <<
 917                                        (REG_SDRAM_ADDRESS_SIZE_OFFS +
 918                                         (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
 919                        } else if (dimm_info[dimm_cnt].sdram_capacity > 0x3) {
 920                                reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x3) <<
 921                                        (REG_SDRAM_ADDRESS_SIZE_OFFS +
 922                                         (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
 923                                reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x4) <<
 924                                        (REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs));
 925                        }
 926                }
 927        }
 928
 929        /* SDRAM device structure */
 930        cs_count = 0;
 931        dimm_cnt = 0;
 932        for (cs = 0; cs < MAX_CS; cs++) {
 933                if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
 934                        if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
 935                                dimm_cnt++;
 936                                cs_count = 0;
 937                        }
 938                        cs_count++;
 939                        if (dimm_info[dimm_cnt].sdram_width == 16)
 940                                reg |= (1 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs));
 941                }
 942        }
 943#endif
 944        reg_write(REG_SDRAM_ADDRESS_CTRL_ADDR, reg);
 945
 946        /*{0x00001418}  -   DDR SDRAM Operation Register */
 947        reg = 0xF00;
 948        for (cs = 0; cs < MAX_CS; cs++) {
 949                if (cs_ena & (1 << cs))
 950                        reg &= ~(1 << (cs + REG_SDRAM_OPERATION_CS_OFFS));
 951        }
 952        reg_write(REG_SDRAM_OPERATION_ADDR, reg);
 953
 954        /*{0x00001420}  -   DDR SDRAM Extended Mode Register */
 955        reg = 0x00000004;
 956        reg_write(REG_SDRAM_EXT_MODE_ADDR, reg);
 957
 958        /*{0x00001424}  -   DDR Controller Control (High) Register */
 959#if (defined(MV88F78X60) || defined(MV88F672X))
 960        reg = 0x0000D3FF;
 961#else
 962        reg = 0x0100D1FF;
 963#endif
 964        reg_write(REG_DDR_CONT_HIGH_ADDR, reg);
 965
 966        /*{0x0000142C}  -   DDR3 Timing Register */
 967        reg = 0x014C2F38;
 968#if defined(MV88F78X60) || defined(MV88F672X)
 969        reg = 0x1FEC2F38;
 970#endif
 971        reg_write(0x142C, reg);
 972
 973        /*{0x00001484}  - MBus CPU Block Register */
 974#ifdef MV88F67XX
 975        if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
 976                reg_write(REG_MBUS_CPU_BLOCK_ADDR, 0x0000E907);
 977#endif
 978
 979        /*
  980         * In case of mixed DIMM and on-board devices, setup parameters will
  981         * be taken statically
 982         */
 983        /*{0x00001494}  -   DDR SDRAM ODT Control (Low) Register */
 984        reg = odt_config[cs_ena];
 985        reg_write(REG_SDRAM_ODT_CTRL_LOW_ADDR, reg);
 986
 987        /*{0x00001498}  -   DDR SDRAM ODT Control (High) Register */
 988        reg = 0x00000000;
 989        reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, reg);
 990
 991        /*{0x0000149C}  -   DDR Dunit ODT Control Register */
 992        reg = cs_ena;
 993        reg_write(REG_DUNIT_ODT_CTRL_ADDR, reg);
 994
  995        /*{0x000014A0}  -   DRAM FIFO Control Register */
 996#if defined(MV88F672X)
 997        reg = 0x000006A9;
 998        reg_write(REG_DRAM_FIFO_CTRL_ADDR, reg);
 999#endif
1000
 1001        /*{0x000014C0}  -   DRAM Address and Control Driving Strength */
1002        reg_write(REG_DRAM_ADDR_CTRL_DRIVE_STRENGTH_ADDR, 0x192435e9);
1003
 1004        /*{0x000014C4}  -   DRAM Data and DQS Driving Strength */
1005        reg_write(REG_DRAM_DATA_DQS_DRIVE_STRENGTH_ADDR, 0xB2C35E9);
1006
1007#if (defined(MV88F78X60) || defined(MV88F672X))
1008        /*{0x000014CC}  -   DRAM Main Pads Calibration Machine Control Register */
1009        reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
1010        reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg | (1 << 0));
1011#endif
1012
1013#if defined(MV88F672X)
1014        /* DRAM Main Pads Calibration Machine Control Register */
1015        /* 0x14CC[4:3] - CalUpdateControl = IntOnly */
1016        reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
1017        reg &= 0xFFFFFFE7;
1018        reg |= (1 << 3);
1019        reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg);
1020#endif
1021
1022#ifdef DUNIT_SPD
1023        cs_count = 0;
1024        dimm_cnt = 0;
1025        for (cs = 0; cs < MAX_CS; cs++) {
1026                if ((1 << cs) & DIMM_CS_BITMAP) {
1027                        if ((1 << cs) & cs_ena) {
1028                                if (dimm_info[dimm_cnt].num_of_module_ranks ==
1029                                    cs_count) {
1030                                        dimm_cnt++;
1031                                        cs_count = 0;
1032                                }
1033                                cs_count++;
1034                                reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8),
1035                                          dimm_info[dimm_cnt].rank_capacity - 1);
1036                        } else {
1037                                reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8), 0);
1038                        }
1039                }
1040        }
1041#endif
1042
1043        /*{0x00020184}  -   Close FastPath - 2G */
1044        reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, 0);
1045
1046        /*{0x00001538}  -    Read Data Sample Delays Register */
1047        reg = 0;
1048        for (cs = 0; cs < MAX_CS; cs++) {
1049                if (cs_ena & (1 << cs))
1050                        reg |= (cl << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1051        }
1052
1053        reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
1054        DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Sample Delays = ", reg,
1055                          1);
1056
1057        /*{0x0000153C}  -   Read Data Ready Delay Register */
1058        reg = 0;
1059        for (cs = 0; cs < MAX_CS; cs++) {
1060                if (cs_ena & (1 << cs)) {
1061                        reg |= ((cl + 2) <<
1062                                (REG_READ_DATA_READY_DELAYS_OFFS * cs));
1063                }
1064        }
1065        reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
1066        DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Ready Delays = ", reg, 1);
1067
1068        /* Set MR registers */
1069        /* MR0 */
1070        reg = 0x00000600;
1071        tmp = ddr3_cl_to_valid_cl(cl);
1072        reg |= ((tmp & 0x1) << 2);
1073        reg |= ((tmp & 0xE) << 3);      /* to bit 4 */
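             /*
              * DDR3 MR0 carries the CAS latency encoding split across A2 (low
              * bit) and A6:A4, hence the two masked shifts above.
              */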
1074        for (cs = 0; cs < MAX_CS; cs++) {
1075                if (cs_ena & (1 << cs)) {
1076                        reg_write(REG_DDR3_MR0_CS_ADDR +
1077                                  (cs << MR_CS_ADDR_OFFS), reg);
1078                }
1079        }
1080
1081        /* MR1 */
1082        reg = 0x00000044 & REG_DDR3_MR1_ODT_MASK;
1083        if (cs_num > 1)
1084                reg = 0x00000046 & REG_DDR3_MR1_ODT_MASK;
1085
1086        for (cs = 0; cs < MAX_CS; cs++) {
1087                if (cs_ena & (1 << cs)) {
1088                        reg |= odt_static[cs_ena][cs];
1089                        reg_write(REG_DDR3_MR1_CS_ADDR +
1090                                  (cs << MR_CS_ADDR_OFFS), reg);
1091                }
1092        }
1093
1094        /* MR2 */
1095        if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
1096                tmp = hclk_time / 2;
1097        else
1098                tmp = hclk_time;
1099
1100        if (tmp >= 2500)
1101                cwl = 5;        /* CWL = 5 */
1102        else if (tmp >= 1875 && tmp < 2500)
1103                cwl = 6;        /* CWL = 6 */
1104        else if (tmp >= 1500 && tmp < 1875)
1105                cwl = 7;        /* CWL = 7 */
1106        else if (tmp >= 1250 && tmp < 1500)
1107                cwl = 8;        /* CWL = 8 */
1108        else if (tmp >= 1070 && tmp < 1250)
1109                cwl = 9;        /* CWL = 9 */
1110        else if (tmp >= 935 && tmp < 1070)
1111                cwl = 10;       /* CWL = 10 */
1112        else if (tmp >= 833 && tmp < 935)
1113                cwl = 11;       /* CWL = 11 */
1114        else if (tmp >= 750 && tmp < 833)
1115                cwl = 12;       /* CWL = 12 */
1116        else {
1117                cwl = 12;       /* CWL = 12 */
 1118                printf("Unsupported hclk period %d ps\n", tmp);
1119        }
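             /*
              * The thresholds above are the JEDEC DDR3 speed-bin clock periods
              * in ps: 2500 (DDR3-800, CWL 5), 1875 (DDR3-1066, CWL 6),
              * 1500 (DDR3-1333, CWL 7), 1250 (DDR3-1600, CWL 8), and so on.
              */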
1120
1121        reg = ((cwl - 5) << REG_DDR3_MR2_CWL_OFFS);
1122
1123        for (cs = 0; cs < MAX_CS; cs++) {
1124                if (cs_ena & (1 << cs)) {
1125                        reg &= REG_DDR3_MR2_ODT_MASK;
1126                        reg |= odt_dynamic[cs_ena][cs];
1127                        reg_write(REG_DDR3_MR2_CS_ADDR +
1128                                  (cs << MR_CS_ADDR_OFFS), reg);
1129                }
1130        }
1131
1132        /* MR3 */
1133        reg = 0x00000000;
1134        for (cs = 0; cs < MAX_CS; cs++) {
1135                if (cs_ena & (1 << cs)) {
1136                        reg_write(REG_DDR3_MR3_CS_ADDR +
1137                                  (cs << MR_CS_ADDR_OFFS), reg);
1138                }
1139        }
1140
1141        /* {0x00001428}  -   DDR ODT Timing (Low) Register */
1142        reg = 0;
1143        reg |= (((cl - cwl + 1) & 0xF) << 4);
1144        reg |= (((cl - cwl + 6) & 0xF) << 8);
1145        reg |= ((((cl - cwl + 6) >> 4) & 0x1) << 21);
1146        reg |= (((cl - 1) & 0xF) << 12);
1147        reg |= (((cl + 6) & 0x1F) << 16);
1148        reg_write(REG_ODT_TIME_LOW_ADDR, reg);
1149
1150        /* {0x0000147C}  -   DDR ODT Timing (High) Register */
1151        reg = 0x00000071;
1152        reg |= ((cwl - 1) << 8);
1153        reg |= ((cwl + 5) << 12);
1154        reg_write(REG_ODT_TIME_HIGH_ADDR, reg);
1155
1156#ifdef DUNIT_SPD
1157        /*{0x000015E0} - DDR3 Rank Control Register */
1158        reg = cs_ena;
1159        cs_count = 0;
1160        dimm_cnt = 0;
1161        for (cs = 0; cs < MAX_CS; cs++) {
1162                if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
1163                        if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
1164                                dimm_cnt++;
1165                                cs_count = 0;
1166                        }
1167                        cs_count++;
1168
1169                        if (dimm_info[dimm_cnt].addr_mirroring &&
1170                            (cs == 1 || cs == 3) &&
1171                            (sum_info.type_info != SPD_MODULE_TYPE_RDIMM)) {
1172                                reg |= (1 << (REG_DDR3_RANK_CTRL_MIRROR_OFFS + cs));
1173                                DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Setting Address Mirroring for CS = ",
1174                                                  cs, 1);
1175                        }
1176                }
1177        }
1178        reg_write(REG_DDR3_RANK_CTRL_ADDR, reg);
1179#endif
1180
 1181        /*{0x000015E4}  -   ZQDS Configuration Register */
1182        reg = 0x00203c18;
1183        reg_write(REG_ZQC_CONF_ADDR, reg);
1184
 1185        /* {0x000015EC}  -   DDR PHY */
1186#if defined(MV88F78X60)
1187        reg = 0xF800AAA5;
1188        if (mv_ctrl_rev_get() == MV_78XX0_B0_REV)
1189                reg = 0xF800A225;
1190#else
1191        reg = 0xDE000025;
1192#if defined(MV88F672X)
1193        reg = 0xF800A225;
1194#endif
1195#endif
1196        reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1197
1198#if (defined(MV88F78X60) || defined(MV88F672X))
1199        /* Registered DIMM support - supported only in AXP A0 devices */
1200        /* Currently supported for SPD detection only */
1201        /*
1202         * Flow is according to the Registered DIMM chapter in the
1203         * Functional Spec
1204         */
1205        if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
1206                DEBUG_INIT_S("DDR3 Training Sequence - Registered DIMM detected\n");
1207
1208                /* Set commands parity completion */
1209                reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
1210                reg &= ~REG_REGISTERED_DRAM_CTRL_PARITY_MASK;
1211                reg |= 0x8;
1212                reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);
1213
1214                /* De-assert M_RESETn and assert M_CKE */
1215                reg_write(REG_SDRAM_INIT_CTRL_ADDR,
1216                          1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
1217                do {
1218                        reg = (reg_read(REG_SDRAM_INIT_CTRL_ADDR)) &
1219                                (1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
1220                } while (reg);
1221
1222                for (rc = 0; rc < SPD_RDIMM_RC_NUM; rc++) {
1223                        if (rc != 6 && rc != 7) {
1224                                /* Set CWA Command */
1225                                reg = (REG_SDRAM_OPERATION_CMD_CWA &
1226                                       ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
1227                                reg |= ((dimm_info[0].dimm_rc[rc] &
1228                                         REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1229                                        REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1230                                reg |= rc << REG_SDRAM_OPERATION_CWA_RC_OFFS;
1231                                /* Configure - Set Delay - tSTAB/tMRD */
1232                                if (rc == 2 || rc == 10)
1233                                        reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
1234                                /* 0x1418 - SDRAM Operation Register */
1235                                reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1236
1237                                /*
1238                                 * Poll the "cmd" field in the SDRAM OP
1239                                 * register for 0x0
1240                                 */
1241                                do {
1242                                        reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
1243                                                (REG_SDRAM_OPERATION_CMD_MASK);
1244                                } while (reg);
1245                        }
1246                }
1247        }
1248#endif
1249
1250        return MV_OK;
1251}
1252
1253/*
 1254 * Name:     ddr3_div - divide integers, rounding up
 1255 * Desc:     Return ceil(val / divider) - sub
 1256 * Args:     val - the value
 1257 *           divider - the divider
 1258 *           sub - subtraction value
 1259 * Notes:
 1260 * Returns:  required value
1261 */
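     /*
      * Example: ddr3_div(13750, 1250, 1) = ceil(13750 / 1250) - 1 = 10, e.g. a
      * 13.75 ns tRP at a 1.25 ns DDR clock programmed as "tRP - 1" clocks.
      */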
1262u32 ddr3_div(u32 val, u32 divider, u32 sub)
1263{
1264        return val / divider + (val % divider > 0 ? 1 : 0) - sub;
1265}
1266
1267/*
1268 * Name:     ddr3_get_max_val
 1269 * Desc:     With DUNIT_STATIC: max(spd_val, static_val) when DIMMs are
 1270 *           present, else static_val. Without it: spd_val.
 1271 * Args:     spd_val, dimm_num, static_val
 1272 * Returns:  The selected value
1273 */
1274u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val)
1275{
1276#ifdef DUNIT_STATIC
1277        if (dimm_num > 0) {
1278                if (spd_val >= static_val)
1279                        return spd_val;
1280                else
1281                        return static_val;
1282        } else {
1283                return static_val;
1284        }
1285#else
1286        return spd_val;
1287#endif
1288}
1289
1290/*
1291 * Name:     ddr3_get_min_val
 1292 * Desc:     With DUNIT_STATIC: min(spd_val, static_val) when DIMMs are
 1293 *           present, else static_val. Without it: spd_val.
 1294 * Args:     spd_val, dimm_num, static_val
 1295 * Returns:  The selected value
1296 */
1297u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val)
1298{
1299#ifdef DUNIT_STATIC
1300        if (dimm_num > 0) {
1301                if (spd_val <= static_val)
1302                        return spd_val;
1303                else
1304                        return static_val;
1305        } else
1306                return static_val;
1307#else
1308        return spd_val;
1309#endif
1310}
1311#endif
1312