uboot/drivers/ddr/marvell/a38x/ddr3_training.c
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"
#include "mv_ddr_common.h"
#include "mv_ddr_training_db.h"
#include "mv_ddr_regs.h"

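/*
 * Lookup helpers: GET_CS_FROM_MASK() maps a chip-select bitmask to a CS
 * number via cs_mask2_num[] (defined below); CS_CBE_VALUE() looks up the
 * CBE register value for a CS number in cs_cbe_reg[], which is defined in
 * another unit.
 */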
#define GET_CS_FROM_MASK(mask)  (cs_mask2_num[mask])
#define CS_CBE_VALUE(cs_num)    (cs_cbe_reg[cs_num])

u32 window_mem_addr = 0;
u32 phy_reg0_val = 0;
u32 phy_reg1_val = 8;
u32 phy_reg2_val = 0;
u32 phy_reg3_val = PARAM_UNDEFINED;
enum mv_ddr_freq low_freq = MV_DDR_FREQ_LOW_FREQ;
enum mv_ddr_freq medium_freq;
u32 debug_dunit = 0;
u32 odt_additional = 1;
u32 *dq_map_table = NULL;

/*
 * In case of DDR4, do not run the ddr3_tip_write_additional_odt_setting
 * function - MC ODT is always 'on'. In the DDR4 case the terminations are
 * RTT_WR and RTT_PARK, and the ODT must always be 'on': 0x1498 = 0xf.
 */
u32 odt_config = 1;

u32 nominal_avs;
u32 extension_avs;

u32 is_pll_before_init = 0, is_adll_calib_before_init = 1, is_dfs_in_init = 0;
u32 dfs_low_freq;

u32 g_rtt_nom_cs0, g_rtt_nom_cs1;
u8 calibration_update_control;  /* 2 - external only, 1 - internal only */

enum hws_result training_result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM];
enum auto_tune_stage training_stage = INIT_CONTROLLER;
u32 finger_test = 0, p_finger_start = 11, p_finger_end = 64,
        n_finger_start = 11, n_finger_end = 64,
        p_finger_step = 3, n_finger_step = 3;
u32 clamp_tbl[] = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 };

/* Initialized to 0xff; this variable is defined by the user in debug mode */
u32 mode_2t = 0xff;
u32 xsb_validate_type = 0;
u32 xsb_validation_base_address = 0xf000;
u32 first_active_if = 0;
u32 dfs_low_phy1 = 0x1f;
u32 multicast_id = 0;
int use_broadcast = 0;
struct hws_tip_freq_config_info *freq_info_table = NULL;
u8 is_cbe_required = 0;
u32 debug_mode = 0;
u32 delay_enable = 0;
int rl_mid_freq_wa = 0;

u32 effective_cs = 0;

u32 vref_init_val = 0x4;
u32 ck_delay = PARAM_UNDEFINED;

/* Design guidelines parameters */
u32 g_zpri_data = PARAM_UNDEFINED; /* controller data - P drive strength */
u32 g_znri_data = PARAM_UNDEFINED; /* controller data - N drive strength */
u32 g_zpri_ctrl = PARAM_UNDEFINED; /* controller C/A - P drive strength */
u32 g_znri_ctrl = PARAM_UNDEFINED; /* controller C/A - N drive strength */

u32 g_zpodt_data = PARAM_UNDEFINED; /* controller data - P ODT */
u32 g_znodt_data = PARAM_UNDEFINED; /* controller data - N ODT */
u32 g_zpodt_ctrl = PARAM_UNDEFINED; /* controller C/A - P ODT */
u32 g_znodt_ctrl = PARAM_UNDEFINED; /* controller C/A - N ODT */

u32 g_odt_config = PARAM_UNDEFINED;
u32 g_rtt_nom = PARAM_UNDEFINED;
u32 g_rtt_wr = PARAM_UNDEFINED;
u32 g_dic = PARAM_UNDEFINED;
u32 g_rtt_park = PARAM_UNDEFINED;

u32 mask_tune_func = (SET_MEDIUM_FREQ_MASK_BIT |
                      WRITE_LEVELING_MASK_BIT |
                      LOAD_PATTERN_2_MASK_BIT |
                      READ_LEVELING_MASK_BIT |
                      SET_TARGET_FREQ_MASK_BIT |
                      WRITE_LEVELING_TF_MASK_BIT |
                      READ_LEVELING_TF_MASK_BIT |
                      CENTRALIZATION_RX_MASK_BIT |
                      CENTRALIZATION_TX_MASK_BIT);

static int ddr3_tip_ddr3_training_main_flow(u32 dev_num);
static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
                              u32 if_id, u32 cl_value, u32 cwl_value);
static int ddr3_tip_ddr3_auto_tune(u32 dev_num);

#ifdef ODT_TEST_SUPPORT
static int odt_test(u32 dev_num, enum hws_algo_type algo_type);
#endif

int adll_calibration(u32 dev_num, enum hws_access_type access_type,
                     u32 if_id, enum mv_ddr_freq frequency);
static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
                               u32 if_id, enum mv_ddr_freq frequency);

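/*
 * Encodes die capacity for the SDRAM address control register. As used in
 * ddr3_tip_configure_cs() below, bits [1:0] of each entry go to the per-CS
 * size field and bit 2 to the per-CS high size bit of SDRAM_ADDR_CTRL_REG.
 */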
static u8 mem_size_config[MV_DDR_DIE_CAP_LAST] = {
        0x2,                    /* 512Mbit  */
        0x3,                    /* 1Gbit    */
        0x0,                    /* 2Gbit    */
        0x4,                    /* 4Gbit    */
        0x5,                    /* 8Gbit    */
        0x0, /* TODO: placeholder for 16-Gbit die capacity */
        0x0, /* TODO: placeholder for 32-Gbit die capacity */
        0x0, /* TODO: placeholder for 12-Gbit die capacity */
        0x0  /* TODO: placeholder for 24-Gbit die capacity */
};
 117
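/*
 * Maps a 4-bit chip-select bitmask to the index of the highest CS it
 * contains (e.g. mask 0x1 -> CS0, 0x3 -> CS1, 0xf -> CS3).
 */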
static u8 cs_mask2_num[] = { 0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3 };

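/*
 * Default values for the ODPG (data pattern generator) and related training
 * registers; each entry is written with MASK_ALL_BITS, i.e. a full register
 * overwrite, presumably when the ODPG is brought back to a known state
 * elsewhere in the training code.
 */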
static struct reg_data odpg_default_value[] = {
        {0x1034, 0x38000, MASK_ALL_BITS},
        {0x1038, 0x0, MASK_ALL_BITS},
        {0x10b0, 0x0, MASK_ALL_BITS},
        {0x10b8, 0x0, MASK_ALL_BITS},
        {0x10c0, 0x0, MASK_ALL_BITS},
        {0x10f0, 0x0, MASK_ALL_BITS},
        {0x10f4, 0x0, MASK_ALL_BITS},
        {0x10f8, 0xff, MASK_ALL_BITS},
        {0x10fc, 0xffff, MASK_ALL_BITS},
        {0x1130, 0x0, MASK_ALL_BITS},
        {0x1830, 0x2000000, MASK_ALL_BITS},
        {0x14d0, 0x0, MASK_ALL_BITS},
        {0x14d4, 0x0, MASK_ALL_BITS},
        {0x14d8, 0x0, MASK_ALL_BITS},
        {0x14dc, 0x0, MASK_ALL_BITS},
        {0x1454, 0x0, MASK_ALL_BITS},
        {0x1594, 0x0, MASK_ALL_BITS},
        {0x1598, 0x0, MASK_ALL_BITS},
        {0x159c, 0x0, MASK_ALL_BITS},
        {0x15a0, 0x0, MASK_ALL_BITS},
        {0x15a4, 0x0, MASK_ALL_BITS},
        {0x15a8, 0x0, MASK_ALL_BITS},
        {0x15ac, 0x0, MASK_ALL_BITS},
        {0x1600, 0x0, MASK_ALL_BITS},
        {0x1604, 0x0, MASK_ALL_BITS},
        {0x1608, 0x0, MASK_ALL_BITS},
        {0x160c, 0x0, MASK_ALL_BITS},
        {0x1610, 0x0, MASK_ALL_BITS},
        {0x1614, 0x0, MASK_ALL_BITS},
        {0x1618, 0x0, MASK_ALL_BITS},
        {0x1624, 0x0, MASK_ALL_BITS},
        {0x1690, 0x0, MASK_ALL_BITS},
        {0x1694, 0x0, MASK_ALL_BITS},
        {0x1698, 0x0, MASK_ALL_BITS},
        {0x169c, 0x0, MASK_ALL_BITS},
        {0x14b8, 0x6f67, MASK_ALL_BITS},
        {0x1630, 0x0, MASK_ALL_BITS},
        {0x1634, 0x0, MASK_ALL_BITS},
        {0x1638, 0x0, MASK_ALL_BITS},
        {0x163c, 0x0, MASK_ALL_BITS},
        {0x16b0, 0x0, MASK_ALL_BITS},
        {0x16b4, 0x0, MASK_ALL_BITS},
        {0x16b8, 0x0, MASK_ALL_BITS},
        {0x16bc, 0x0, MASK_ALL_BITS},
        {0x16c0, 0x0, MASK_ALL_BITS},
        {0x16c4, 0x0, MASK_ALL_BITS},
        {0x16c8, 0x0, MASK_ALL_BITS},
        {0x16cc, 0x1, MASK_ALL_BITS},
        {0x16f0, 0x1, MASK_ALL_BITS},
        {0x16f4, 0x0, MASK_ALL_BITS},
        {0x16f8, 0x0, MASK_ALL_BITS},
        {0x16fc, 0x0, MASK_ALL_BITS}
};

/* MR cmd and addr definitions */
struct mv_ddr_mr_data mr_data[] = {
        {MRS0_CMD, MR0_REG},
        {MRS1_CMD, MR1_REG},
        {MRS2_CMD, MR2_REG},
        {MRS3_CMD, MR3_REG}
};

/* invert pads */
static int ddr3_tip_pad_inv(void)
{
        u32 sphy, data;
        u32 sphy_max = ddr3_tip_dev_attr_get(0, MV_ATTR_OCTET_PER_INTERFACE);
        u32 ck_swap_ctrl_sphy;
        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

        for (sphy = 0; sphy < sphy_max; sphy++) {
                VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sphy);
                if (tm->interface_params[0].
                    as_bus_params[sphy].is_dqs_swap == 1) {
                        data = (INVERT_PAD << INV_PAD4_OFFS |
                                INVERT_PAD << INV_PAD5_OFFS);
                        /* dqs swap */
                        ddr3_tip_bus_read_modify_write(0, ACCESS_TYPE_UNICAST,
                                                       0, sphy,
                                                       DDR_PHY_DATA,
                                                       PHY_CTRL_PHY_REG,
                                                       data, data);
                }

                if (tm->interface_params[0].as_bus_params[sphy].
                    is_ck_swap == 1 && sphy == 0) {
/* TODO: move this code to a per-platform implementation */
                        /* clock swap for both cs0 and cs1 */
                        data = (INVERT_PAD << INV_PAD2_OFFS |
                                INVERT_PAD << INV_PAD6_OFFS |
                                INVERT_PAD << INV_PAD4_OFFS |
                                INVERT_PAD << INV_PAD5_OFFS);
                        ck_swap_ctrl_sphy = CK_SWAP_CTRL_PHY_NUM;
                        ddr3_tip_bus_read_modify_write(0, ACCESS_TYPE_UNICAST,
                                                       0, ck_swap_ctrl_sphy,
                                                       DDR_PHY_CONTROL,
                                                       PHY_CTRL_PHY_REG,
                                                       data, data);
                }
        }

        return MV_OK;
}

static int ddr3_tip_rank_control(u32 dev_num, u32 if_id);

/*
 * Update global training parameters with data from the user
 */
int ddr3_tip_tune_training_params(u32 dev_num,
                                  struct tune_train_params *params)
{
        if (params->ck_delay != PARAM_UNDEFINED)
                ck_delay = params->ck_delay;
        if (params->phy_reg3_val != PARAM_UNDEFINED)
                phy_reg3_val = params->phy_reg3_val;
        if (params->g_rtt_nom != PARAM_UNDEFINED)
                g_rtt_nom = params->g_rtt_nom;
        if (params->g_rtt_wr != PARAM_UNDEFINED)
                g_rtt_wr = params->g_rtt_wr;
        if (params->g_dic != PARAM_UNDEFINED)
                g_dic = params->g_dic;
        if (params->g_odt_config != PARAM_UNDEFINED)
                g_odt_config = params->g_odt_config;
        if (params->g_zpri_data != PARAM_UNDEFINED)
                g_zpri_data = params->g_zpri_data;
        if (params->g_znri_data != PARAM_UNDEFINED)
                g_znri_data = params->g_znri_data;
        if (params->g_zpri_ctrl != PARAM_UNDEFINED)
                g_zpri_ctrl = params->g_zpri_ctrl;
        if (params->g_znri_ctrl != PARAM_UNDEFINED)
                g_znri_ctrl = params->g_znri_ctrl;
        if (params->g_zpodt_data != PARAM_UNDEFINED)
                g_zpodt_data = params->g_zpodt_data;
        if (params->g_znodt_data != PARAM_UNDEFINED)
                g_znodt_data = params->g_znodt_data;
        if (params->g_zpodt_ctrl != PARAM_UNDEFINED)
                g_zpodt_ctrl = params->g_zpodt_ctrl;
        if (params->g_znodt_ctrl != PARAM_UNDEFINED)
                g_znodt_ctrl = params->g_znodt_ctrl;
        if (params->g_rtt_park != PARAM_UNDEFINED)
                g_rtt_park = params->g_rtt_park;

        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
                          ("DGL parameters: 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X 0x%X\n",
                           g_zpri_data, g_znri_data, g_zpri_ctrl, g_znri_ctrl, g_zpodt_data, g_znodt_data,
                           g_zpodt_ctrl, g_znodt_ctrl, g_rtt_nom, g_dic, g_odt_config, g_rtt_wr));

        return MV_OK;
}

/*
 * Configure CS
 */
int ddr3_tip_configure_cs(u32 dev_num, u32 if_id, u32 cs_num, u32 enable)
{
        u32 data, addr_hi, data_high;
        u32 mem_index;
        u32 clk_enable;
        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

        if (tm->clk_enable & (1 << cs_num))
                clk_enable = 1;
        else
                clk_enable = enable;

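        /*
         * SDRAM_ADDR_CTRL_REG layout, as used below: the per-CS nibble at
         * (cs_num * 4) holds the device width in bits [1:0] (0 = x8,
         * 1 = x16) and the low two size bits in bits [3:2]; bit
         * (20 + cs_num) holds the high size bit and bit (16 + cs_num)
         * enables address select mode for that CS.
         */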
        if (enable == 1) {
                data = (tm->interface_params[if_id].bus_width ==
                        MV_DDR_DEV_WIDTH_8BIT) ? 0 : 1;
                CHECK_STATUS(ddr3_tip_if_write
                             (dev_num, ACCESS_TYPE_UNICAST, if_id,
                              SDRAM_ADDR_CTRL_REG, (data << (cs_num * 4)),
                              0x3 << (cs_num * 4)));
                mem_index = tm->interface_params[if_id].memory_size;

                addr_hi = mem_size_config[mem_index] & 0x3;
                CHECK_STATUS(ddr3_tip_if_write
                             (dev_num, ACCESS_TYPE_UNICAST, if_id,
                              SDRAM_ADDR_CTRL_REG,
                              (addr_hi << (2 + cs_num * 4)),
                              0x3 << (2 + cs_num * 4)));

                data_high = (mem_size_config[mem_index] & 0x4) >> 2;
                CHECK_STATUS(ddr3_tip_if_write
                             (dev_num, ACCESS_TYPE_UNICAST, if_id,
                              SDRAM_ADDR_CTRL_REG,
                              data_high << (20 + cs_num), 1 << (20 + cs_num)));

                /* Enable Address Select Mode */
                CHECK_STATUS(ddr3_tip_if_write
                             (dev_num, ACCESS_TYPE_UNICAST, if_id,
                              SDRAM_ADDR_CTRL_REG, 1 << (16 + cs_num),
                              1 << (16 + cs_num)));
        }
        switch (cs_num) {
        case 0:
        case 1:
        case 2:
                CHECK_STATUS(ddr3_tip_if_write
                             (dev_num, ACCESS_TYPE_UNICAST, if_id,
                              DUNIT_CTRL_LOW_REG, (clk_enable << (cs_num + 11)),
                              1 << (cs_num + 11)));
                break;
        case 3:
                CHECK_STATUS(ddr3_tip_if_write
                             (dev_num, ACCESS_TYPE_UNICAST, if_id,
                              DUNIT_CTRL_LOW_REG, (clk_enable << 15), 1 << 15));
                break;
        }

        return MV_OK;
}

/*
 * Init Controller Flow
 */
int hws_ddr3_tip_init_controller(u32 dev_num, struct init_cntr_param *init_cntr_prm)
{
        u32 if_id;
        u32 cs_num;
        u32 t_ckclk = 0, t_wr = 0, t2t = 0;
        u32 data_value = 0, cs_cnt = 0,
                mem_mask = 0, bus_index = 0;
        enum mv_ddr_speed_bin speed_bin_index = SPEED_BIN_DDR_2133N;
        u32 cs_mask = 0;
        u32 cl_value = 0, cwl_val = 0;
        u32 bus_cnt = 0, adll_tap = 0;
        enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
        u32 data_read[MAX_INTERFACE_NUM];
        u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
        enum mv_ddr_timing timing;
        enum mv_ddr_freq freq = tm->interface_params[0].memory_freq;

        DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
                          ("Init_controller, do_mrs_phy=%d, is_ctrl64_bit=%d\n",
                           init_cntr_prm->do_mrs_phy,
                           init_cntr_prm->is_ctrl64_bit));

        if (init_cntr_prm->init_phy == 1) {
                CHECK_STATUS(ddr3_tip_configure_phy(dev_num));
        }

        if (generic_init_controller == 1) {
                for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
                        VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
                        DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
                                          ("active IF %d\n", if_id));
                        mem_mask = 0;
                        for (bus_index = 0;
                             bus_index < octets_per_if_num;
                             bus_index++) {
                                VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index);
                                mem_mask |=
                                        tm->interface_params[if_id].
                                        as_bus_params[bus_index].mirror_enable_bitmask;
                        }

                        if (mem_mask != 0) {
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, ACCESS_TYPE_MULTICAST,
                                              if_id, DUAL_DUNIT_CFG_REG, 0,
                                              0x8));
                        }

                        speed_bin_index =
                                tm->interface_params[if_id].
                                speed_bin_index;

                        /* t_ckclk is the external clock period, in ps (freq is in MHz) */
                        t_ckclk = (MEGA / mv_ddr_freq_get(freq));

                        if (MV_DDR_IS_HALF_BUS_DRAM_MODE(tm->bus_act_mask, octets_per_if_num))
                                data_value = (0x4000 | 0 | 0x1000000) & ~(1 << 26);
                        else
                                data_value = (0x4000 | 0x8000 | 0x1000000) & ~(1 << 26);

                        /* Interface Bus Width */
                        /* SRMode */
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      SDRAM_CFG_REG, data_value,
                                      0x100c000));

                        /* Interleave first command pre-charge enable (TBD) */
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      SDRAM_OPEN_PAGES_CTRL_REG, (1 << 10),
                                      (1 << 10)));

                        /* Reset divider_b assert -> de-assert */
                        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
                                                       SDRAM_CFG_REG,
                                                       0x0 << PUP_RST_DIVIDER_OFFS,
                                                       PUP_RST_DIVIDER_MASK << PUP_RST_DIVIDER_OFFS));

                        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
                                                       SDRAM_CFG_REG,
                                                       0x1 << PUP_RST_DIVIDER_OFFS,
                                                       PUP_RST_DIVIDER_MASK << PUP_RST_DIVIDER_OFFS));

                        /* PHY configuration */
                        /*
                         * Postamble length = 1.5cc, address cntl to clk skew
                         * = 1/2, preamble length normal, parallel ADLL enable
                         */
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      DRAM_PHY_CFG_REG, 0x28, 0x3e));
                        if (init_cntr_prm->is_ctrl64_bit) {
                                /* positive edge */
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, access_type, if_id,
                                              DRAM_PHY_CFG_REG, 0x0,
                                              0xff80));
                        }

                        /* calibration block disable */
                        /* Xbar Read buffer select (for Internal access) */
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      MAIN_PADS_CAL_MACH_CTRL_REG, 0x1200c,
                                      0x7dffe01c));
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      MAIN_PADS_CAL_MACH_CTRL_REG,
                                      calibration_update_control << 3, 0x3 << 3));

                        /* Pad calibration control - enable */
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      MAIN_PADS_CAL_MACH_CTRL_REG, 0x1, 0x1));
                        if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) {
                                /* DDR3 rank ctrl - part of the generic code */
                                /* CS1 mirroring enable + w/a for JIRA DUNIT-14581 */
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, access_type, if_id,
                                              DDR3_RANK_CTRL_REG, 0x27, MASK_ALL_BITS));
                        }

                        cs_mask = 0;
                        data_value = 0x7;
                        /*
                         * Address ctrl - part of the generic code.
                         * The following configuration is done:
                         * 1) Memory size
                         * 2) Bus width
                         * 3) CS#
                         * 4) Page number
                         * Per Dunit, get the bus width parameter from the
                         * topology map.
                         */

                        data_value =
                                (tm->interface_params[if_id].
                                 bus_width == MV_DDR_DEV_WIDTH_8BIT) ? 0 : 1;

                        /* create merged cs mask for all cs available in dunit */
                        for (bus_cnt = 0;
                             bus_cnt < octets_per_if_num;
                             bus_cnt++) {
                                VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
                                cs_mask |=
                                        tm->interface_params[if_id].
                                        as_bus_params[bus_cnt].cs_bitmask;
                        }
                        DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
                                          ("Init_controller IF %d cs_mask %d\n",
                                           if_id, cs_mask));
                        /*
                         * Configure the CS based on the topology map - if
                         * the Dunit is CS0, configure CS0; if it is
                         * multi-CS, configure both. The bus_width is the
                         * memory bus width - x8 or x16.
                         */
                        for (cs_cnt = 0; cs_cnt < MAX_CS_NUM; cs_cnt++) {
                                ddr3_tip_configure_cs(dev_num, if_id, cs_cnt,
                                                      ((cs_mask & (1 << cs_cnt)) ? 1
                                                       : 0));
                        }

                        if (init_cntr_prm->do_mrs_phy) {
                                /*
                                 * MR0 - part of the generic code.
                                 * The following configuration is done:
                                 * 1) Burst length
                                 * 2) CAS latency
                                 * Get for each Dunit its speed bin and
                                 * target frequency; from these two
                                 * parameters get the appropriate CAS
                                 * latency from the CL table.
                                 */
                                cl_value =
                                        tm->interface_params[if_id].
                                        cas_l;
                                cwl_val =
                                        tm->interface_params[if_id].
                                        cas_wl;
                                DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
                                                  ("cl_value 0x%x cwl_val 0x%x\n",
                                                   cl_value, cwl_val));

                                t_wr = time_to_nclk(mv_ddr_speed_bin_timing_get
                                                           (speed_bin_index,
                                                            SPEED_BIN_TWR), t_ckclk);

                                data_value =
                                        ((cl_mask_table[cl_value] & 0x1) << 2) |
                                        ((cl_mask_table[cl_value] & 0xe) << 3);
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, access_type, if_id,
                                              MR0_REG, data_value,
                                              (0x7 << 4) | (1 << 2)));
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, access_type, if_id,
                                              MR0_REG, twr_mask_table[t_wr] << 9,
                                              0x7 << 9));

                                /*
                                 * MR1: set RTT and DIC design guideline
                                 * values configured by the user
                                 */
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, ACCESS_TYPE_MULTICAST,
                                              PARAM_NOT_CARE, MR1_REG,
                                              g_dic | g_rtt_nom, 0x266));

                                /* MR2 - part of the generic code */
                                /*
                                 * The following configuration is done:
                                 * 1) SRT
                                 * 2) CAS write latency
                                 */
                                data_value = (cwl_mask_table[cwl_val] << 3);
                                data_value |=
                                        ((tm->interface_params[if_id].
                                          interface_temp ==
                                          MV_DDR_TEMP_HIGH) ? (1 << 7) : 0);
                                data_value |= g_rtt_wr;
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, access_type, if_id,
                                              MR2_REG, data_value,
                                              (0x7 << 3) | (0x1 << 7) | (0x3 <<
                                                                         9)));
                        }

                        ddr3_tip_write_odt(dev_num, access_type, if_id,
                                           cl_value, cwl_val);
                        ddr3_tip_set_timing(dev_num, access_type, if_id, freq);

                        if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) {
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, access_type, if_id,
                                              DUNIT_CTRL_HIGH_REG, 0x1000119,
                                              0x100017F));
                        } else {
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, access_type, if_id,
                                              DUNIT_CTRL_HIGH_REG, 0x600177 |
                                              (init_cntr_prm->is_ctrl64_bit ?
                                              CPU_INTERJECTION_ENA_SPLIT_ENA << CPU_INTERJECTION_ENA_OFFS :
                                              CPU_INTERJECTION_ENA_SPLIT_DIS << CPU_INTERJECTION_ENA_OFFS),
                                              0x1600177 | CPU_INTERJECTION_ENA_MASK <<
                                              CPU_INTERJECTION_ENA_OFFS));
                        }

                        /* reset bit 7 */
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      DUNIT_CTRL_HIGH_REG,
                                      (init_cntr_prm->msys_init << 7), (1 << 7)));

                        timing = tm->interface_params[if_id].timing;
 594
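                        /*
                         * Select 1T/2T command timing: a debug override via
                         * mode_2t takes precedence, then an explicit timing
                         * from the topology map; otherwise default to 2T
                         * whenever more than one CS is populated.
                         */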
                        if (mode_2t != 0xff) {
                                t2t = mode_2t;
                        } else if (timing != MV_DDR_TIM_DEFAULT) {
                                t2t = (timing == MV_DDR_TIM_2T) ? 1 : 0;
                        } else {
                                /* calculate number of CS (per interface) */
                                cs_num = mv_ddr_cs_num_get();
                                t2t = (cs_num == 1) ? 0 : 1;
                        }

                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      DUNIT_CTRL_LOW_REG, t2t << 3,
                                      0x3 << 3));
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      DDR_TIMING_REG, 0x28 << 9, 0x3f << 9));
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      DDR_TIMING_REG, 0xa << 21, 0xff << 21));

                        /* move the block to ddr3_tip_set_timing - end */
                        /* AUTO_ZQC_TIMING */
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      ZQC_CFG_REG, (AUTO_ZQC_TIMING | (2 << 20)),
                                      0x3fffff));
                        CHECK_STATUS(ddr3_tip_if_read
                                     (dev_num, access_type, if_id,
                                      DRAM_PHY_CFG_REG, data_read, 0x30));
                        data_value =
                                (data_read[if_id] == 0) ? (1 << 11) : 0;
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, access_type, if_id,
                                      DUNIT_CTRL_HIGH_REG, data_value,
                                      (1 << 11)));

                        /* Set Active control for ODT write transactions */
                        CHECK_STATUS(ddr3_tip_if_write
                                     (dev_num, ACCESS_TYPE_MULTICAST,
                                      PARAM_NOT_CARE, 0x1494, g_odt_config,
                                      MASK_ALL_BITS));

                        if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) == MV_TIP_REV_3) {
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, access_type, if_id,
                                              0x14a8, 0x900, 0x900));
                                /* w/a: control sub-phy outputs float during self-refresh */
                                CHECK_STATUS(ddr3_tip_if_write
                                             (dev_num, access_type, if_id,
                                              0x16d0, 0, 0x8000));
                        }
                }
        }

        for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
                VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
                CHECK_STATUS(ddr3_tip_rank_control(dev_num, if_id));

                if (init_cntr_prm->do_mrs_phy)
                        ddr3_tip_pad_inv();

                /* Pad calibration control - disable */
                CHECK_STATUS(ddr3_tip_if_write
                             (dev_num, access_type, if_id,
                              MAIN_PADS_CAL_MACH_CTRL_REG, 0x0, 0x1));
                CHECK_STATUS(ddr3_tip_if_write
                             (dev_num, access_type, if_id,
                              MAIN_PADS_CAL_MACH_CTRL_REG,
                              calibration_update_control << 3, 0x3 << 3));
        }

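        /*
         * One ADLL tap is 1/64 of the clock period; with freq in MHz this
         * yields the tap size in ps, passed on as the command/address init
         * delay.
         */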
        if (delay_enable != 0) {
                adll_tap = MEGA / (mv_ddr_freq_get(freq) * 64);
                ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
        }

        return MV_OK;
}

/*
 * Rank Control Flow
 */
static int ddr3_tip_rev2_rank_control(u32 dev_num, u32 if_id)
{
        u32 data_value = 0, bus_cnt = 0;
        u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

        for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
                VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
                data_value |= tm->interface_params[if_id].as_bus_params[bus_cnt].
                              cs_bitmask;

                if (tm->interface_params[if_id].as_bus_params[bus_cnt].
                    mirror_enable_bitmask == 1) {
                        /*
                         * Check mirror_enable_bitmask: if mirroring is
                         * enabled for CS n, set bit (n + 4) in the word
                         */
                        if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
                             cs_bitmask & 0x1) != 0) {
                                data_value |= tm->interface_params[if_id].
                                              as_bus_params[bus_cnt].
                                              mirror_enable_bitmask << 4;
                        }

                        if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
                             cs_bitmask & 0x2) != 0) {
                                data_value |= tm->interface_params[if_id].
                                              as_bus_params[bus_cnt].
                                              mirror_enable_bitmask << 5;
                        }

                        if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
                             cs_bitmask & 0x4) != 0) {
                                data_value |= tm->interface_params[if_id].
                                              as_bus_params[bus_cnt].
                                              mirror_enable_bitmask << 6;
                        }

                        if ((tm->interface_params[if_id].as_bus_params[bus_cnt].
                             cs_bitmask & 0x8) != 0) {
                                data_value |= tm->interface_params[if_id].
                                              as_bus_params[bus_cnt].
                                              mirror_enable_bitmask << 7;
                        }
                }
        }

        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, ACCESS_TYPE_UNICAST, if_id, DDR3_RANK_CTRL_REG,
                      data_value, 0xff));

        return MV_OK;
}

static int ddr3_tip_rev3_rank_control(u32 dev_num, u32 if_id)
{
        u32 data_value = 0, bus_cnt;
        u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

        for (bus_cnt = 1; bus_cnt < octets_per_if_num; bus_cnt++) {
                VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
                if ((tm->interface_params[if_id].
                     as_bus_params[0].cs_bitmask !=
                     tm->interface_params[if_id].
                     as_bus_params[bus_cnt].cs_bitmask) ||
                    (tm->interface_params[if_id].
                     as_bus_params[0].mirror_enable_bitmask !=
                     tm->interface_params[if_id].
                     as_bus_params[bus_cnt].mirror_enable_bitmask))
                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
                                          ("WARNING: Wrong configuration for pup #%d: CS mask and CS mirroring should be the same for all pups\n",
                                           bus_cnt));
        }

        data_value |= tm->interface_params[if_id].
                as_bus_params[0].cs_bitmask;
        data_value |= tm->interface_params[if_id].
                as_bus_params[0].mirror_enable_bitmask << 4;

        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, ACCESS_TYPE_UNICAST, if_id, DDR3_RANK_CTRL_REG,
                      data_value, 0xff));

        return MV_OK;
}

static int ddr3_tip_rank_control(u32 dev_num, u32 if_id)
{
        if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) == MV_TIP_REV_2)
                return ddr3_tip_rev2_rank_control(dev_num, if_id);
        else
                return ddr3_tip_rev3_rank_control(dev_num, if_id);
}

/*
 * Algorithm Parameters Validation
 */
int ddr3_tip_validate_algo_var(u32 value, u32 fail_value, char *var_name)
{
        if (value == fail_value) {
                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
                                  ("Error: %s is not initialized (Algo Components Validation)\n",
                                   var_name));
                return 0;
        }

        return 1;
}

int ddr3_tip_validate_algo_ptr(void *ptr, void *fail_value, char *ptr_name)
{
        if (ptr == fail_value) {
                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
                                  ("Error: %s is not initialized (Algo Components Validation)\n",
                                   ptr_name));
                return 0;
        }

        return 1;
}

int ddr3_tip_validate_algo_components(u8 dev_num)
{
        int status = 1;

        /* Check DGL parameters */
        status &= ddr3_tip_validate_algo_var(ck_delay, PARAM_UNDEFINED, "ck_delay");
        status &= ddr3_tip_validate_algo_var(phy_reg3_val, PARAM_UNDEFINED, "phy_reg3_val");
        status &= ddr3_tip_validate_algo_var(g_rtt_nom, PARAM_UNDEFINED, "g_rtt_nom");
        status &= ddr3_tip_validate_algo_var(g_dic, PARAM_UNDEFINED, "g_dic");
        status &= ddr3_tip_validate_algo_var(odt_config, PARAM_UNDEFINED, "odt_config");
        status &= ddr3_tip_validate_algo_var(g_zpri_data, PARAM_UNDEFINED, "g_zpri_data");
        status &= ddr3_tip_validate_algo_var(g_znri_data, PARAM_UNDEFINED, "g_znri_data");
        status &= ddr3_tip_validate_algo_var(g_zpri_ctrl, PARAM_UNDEFINED, "g_zpri_ctrl");
        status &= ddr3_tip_validate_algo_var(g_znri_ctrl, PARAM_UNDEFINED, "g_znri_ctrl");
        status &= ddr3_tip_validate_algo_var(g_zpodt_data, PARAM_UNDEFINED, "g_zpodt_data");
        status &= ddr3_tip_validate_algo_var(g_znodt_data, PARAM_UNDEFINED, "g_znodt_data");
        status &= ddr3_tip_validate_algo_var(g_zpodt_ctrl, PARAM_UNDEFINED, "g_zpodt_ctrl");
        status &= ddr3_tip_validate_algo_var(g_znodt_ctrl, PARAM_UNDEFINED, "g_znodt_ctrl");

        /* Check function pointers */
        status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_dunit_mux_select_func,
                                             NULL, "tip_dunit_mux_select_func");
        status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_dunit_write,
                                             NULL, "mv_ddr_dunit_write");
        status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_dunit_read,
                                             NULL, "mv_ddr_dunit_read");
        status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_phy_write,
                                             NULL, "mv_ddr_phy_write");
        status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].mv_ddr_phy_read,
                                             NULL, "mv_ddr_phy_read");
        status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_get_freq_config_info_func,
                                             NULL, "tip_get_freq_config_info_func");
        status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_set_freq_divider_func,
                                             NULL, "tip_set_freq_divider_func");
        status &= ddr3_tip_validate_algo_ptr(config_func_info[dev_num].tip_get_clock_ratio,
                                             NULL, "tip_get_clock_ratio");

        status &= ddr3_tip_validate_algo_ptr(dq_map_table, NULL, "dq_map_table");
        status &= ddr3_tip_validate_algo_var(dfs_low_freq, 0, "dfs_low_freq");

        return (status == 1) ? MV_OK : MV_NOT_INITIALIZED;
}

int ddr3_pre_algo_config(void)
{
        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

        /* Set Bus3 ECC training mode */
        if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) {
                /* Set Bus3 ECC MUX */
                CHECK_STATUS(ddr3_tip_if_write
                             (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
                              DRAM_PINS_MUX_REG, 0x100, 0x100));
        }

        /* Set regular ECC training mode (bus4 and bus3) */
        if ((DDR3_IS_ECC_PUP4_MODE(tm->bus_act_mask)) ||
            (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) ||
            (DDR3_IS_ECC_PUP8_MODE(tm->bus_act_mask))) {
                /* Enable ECC Write MUX */
                CHECK_STATUS(ddr3_tip_if_write
                             (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
                              TRAINING_SW_2_REG, 0x100, 0x100));
                /* General ECC enable */
                CHECK_STATUS(ddr3_tip_if_write
                             (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
                              SDRAM_CFG_REG, 0x40000, 0x40000));
                /* Disable Read Data ECC MUX */
                CHECK_STATUS(ddr3_tip_if_write
                             (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
                              TRAINING_SW_2_REG, 0x0, 0x2));
        }

        return MV_OK;
}

int ddr3_post_algo_config(void)
{
        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
        int status;

        status = ddr3_post_run_alg();
        if (MV_OK != status) {
                printf("DDR3 Post Run Alg - FAILED 0x%x\n", status);
                return status;
        }

        /* Unset ECC training mode */
        if ((DDR3_IS_ECC_PUP4_MODE(tm->bus_act_mask)) ||
            (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask)) ||
            (DDR3_IS_ECC_PUP8_MODE(tm->bus_act_mask))) {
                /* Disable ECC Write MUX */
                CHECK_STATUS(ddr3_tip_if_write
                             (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
                              TRAINING_SW_2_REG, 0x0, 0x100));
                /* General ECC and Bus3 ECC MUX remain enabled */
        }

        return MV_OK;
}

/*
 * Run Training Flow
 */
int hws_ddr3_tip_run_alg(u32 dev_num, enum hws_algo_type algo_type)
{
        int status = MV_OK;

        status = ddr3_pre_algo_config();
        if (MV_OK != status) {
                printf("DDR3 Pre Algo Config - FAILED 0x%x\n", status);
                return status;
        }

#ifdef ODT_TEST_SUPPORT
        if (finger_test == 1)
                return odt_test(dev_num, algo_type);
#endif

        if (algo_type == ALGO_TYPE_DYNAMIC) {
                status = ddr3_tip_ddr3_auto_tune(dev_num);
        }

        if (status != MV_OK) {
                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
                                  ("********   DRAM initialization Failed (res 0x%x)   ********\n",
                                   status));
                return status;
        }

        status = ddr3_post_algo_config();
        if (MV_OK != status) {
                printf("DDR3 Post Algo Config - FAILED 0x%x\n", status);
                return status;
        }

        return status;
}

#ifdef ODT_TEST_SUPPORT
/*
 * ODT Test
 */
static int odt_test(u32 dev_num, enum hws_algo_type algo_type)
{
        int ret = MV_OK, ret_tune = MV_OK;
        int pfinger_val = 0, nfinger_val;

        for (pfinger_val = p_finger_start; pfinger_val <= p_finger_end;
             pfinger_val += p_finger_step) {
                for (nfinger_val = n_finger_start; nfinger_val <= n_finger_end;
                     nfinger_val += n_finger_step) {
                        if (finger_test != 0) {
                                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
                                                  ("pfinger_val %d nfinger_val %d\n",
                                                   pfinger_val, nfinger_val));
                                /*
                                 * TODO: need to check the correctness
                                 * of the following two lines.
                                 */
                                g_zpodt_data = pfinger_val;
                                g_znodt_data = nfinger_val;
                        }

                        if (algo_type == ALGO_TYPE_DYNAMIC) {
                                ret = ddr3_tip_ddr3_auto_tune(dev_num);
                        }
                }
        }

        if (ret_tune != MV_OK) {
                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
                                  ("Run_alg: tuning failed %d\n", ret_tune));
                ret = (ret == MV_OK) ? ret_tune : ret;
        }

        return ret;
}
#endif

/*
 * Select Controller
 */
int hws_ddr3_tip_select_ddr_controller(u32 dev_num, int enable)
{
        return config_func_info[dev_num].
                tip_dunit_mux_select_func((u8)dev_num, enable);
}

/*
 * Dunit Register Write
 */
int ddr3_tip_if_write(u32 dev_num, enum hws_access_type interface_access,
                      u32 if_id, u32 reg_addr, u32 data_value, u32 mask)
{
        config_func_info[dev_num].mv_ddr_dunit_write(reg_addr, mask, data_value);

        return MV_OK;
}

/*
 * Dunit Register Read
 */
int ddr3_tip_if_read(u32 dev_num, enum hws_access_type interface_access,
                     u32 if_id, u32 reg_addr, u32 *data, u32 mask)
{
        config_func_info[dev_num].mv_ddr_dunit_read(reg_addr, mask, data);

        return MV_OK;
}

/*
 * Dunit Register Polling
 */
int ddr3_tip_if_polling(u32 dev_num, enum hws_access_type access_type,
                        u32 if_id, u32 exp_value, u32 mask, u32 offset,
                        u32 poll_tries)
{
        u32 poll_cnt = 0, interface_num = 0, start_if, end_if;
        u32 read_data[MAX_INTERFACE_NUM];
        int ret;
        int is_fail = 0, is_if_fail;
        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

        if (access_type == ACCESS_TYPE_MULTICAST) {
                start_if = 0;
                end_if = MAX_INTERFACE_NUM - 1;
        } else {
                start_if = if_id;
                end_if = if_id;
        }

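        /*
         * For each selected interface, repeatedly read the register at
         * 'offset' until the masked value equals exp_value or poll_tries is
         * exhausted; the per-interface outcome is recorded in
         * training_result for the current training stage.
         */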
        for (interface_num = start_if; interface_num <= end_if; interface_num++) {
                VALIDATE_IF_ACTIVE(tm->if_act_mask, interface_num);

                is_if_fail = 0;
                for (poll_cnt = 0; poll_cnt < poll_tries; poll_cnt++) {
                        ret =
                                ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST,
                                                 interface_num, offset, read_data,
                                                 mask);
                        if (ret != MV_OK)
                                return ret;

                        if (read_data[interface_num] == exp_value)
                                break;
                }

                if (poll_cnt >= poll_tries) {
                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
                                          ("max poll IF #%d\n", interface_num));
                        is_fail = 1;
                        is_if_fail = 1;
                }

                training_result[training_stage][interface_num] =
                        (is_if_fail == 1) ? TEST_FAILED : TEST_SUCCESS;
        }

        return (is_fail == 0) ? MV_OK : MV_FAIL;
}

/*
 * Bus read access
 */
int ddr3_tip_bus_read(u32 dev_num, u32 if_id,
                      enum hws_access_type phy_access, u32 phy_id,
                      enum hws_ddr_phy phy_type, u32 reg_addr, u32 *data)
{
        return config_func_info[dev_num].
                mv_ddr_phy_read(phy_access, phy_id, phy_type, reg_addr, data);
}

/*
 * Bus write access
 */
int ddr3_tip_bus_write(u32 dev_num, enum hws_access_type interface_access,
                       u32 if_id, enum hws_access_type phy_access,
                       u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
                       u32 data_value)
{
        return config_func_info[dev_num].
                mv_ddr_phy_write(phy_access, phy_id, phy_type, reg_addr, data_value, OPERATION_WRITE);
}

/*
 * Phy read-modify-write
 */
int ddr3_tip_bus_read_modify_write(u32 dev_num, enum hws_access_type access_type,
                                   u32 interface_id, u32 phy_id,
                                   enum hws_ddr_phy phy_type, u32 reg_addr,
                                   u32 data_value, u32 reg_mask)
{
        u32 data_val = 0, if_id, start_if, end_if;
        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

        if (access_type == ACCESS_TYPE_MULTICAST) {
                start_if = 0;
                end_if = MAX_INTERFACE_NUM - 1;
        } else {
                start_if = interface_id;
                end_if = interface_id;
        }

        for (if_id = start_if; if_id <= end_if; if_id++) {
                VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
                CHECK_STATUS(ddr3_tip_bus_read
                             (dev_num, if_id, ACCESS_TYPE_UNICAST, phy_id,
                              phy_type, reg_addr, &data_val));
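                /* merge: keep unmasked bits of the current value, take masked bits from the caller */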
                data_value = (data_val & (~reg_mask)) | (data_value & reg_mask);
                CHECK_STATUS(ddr3_tip_bus_write
                             (dev_num, ACCESS_TYPE_UNICAST, if_id,
                              ACCESS_TYPE_UNICAST, phy_id, phy_type, reg_addr,
                              data_value));
        }

        return MV_OK;
}

/*
 * ADLL Calibration
 */
int adll_calibration(u32 dev_num, enum hws_access_type access_type,
                     u32 if_id, enum mv_ddr_freq frequency)
{
        struct hws_tip_freq_config_info freq_config_info;
        u32 bus_cnt = 0;
        u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

        /* Reset divider_b assert -> de-assert */
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, access_type, if_id, SDRAM_CFG_REG,
                      0, 0x10000000));
        mdelay(10);
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, access_type, if_id, SDRAM_CFG_REG,
                      0x10000000, 0x10000000));

        CHECK_STATUS(config_func_info[dev_num].
                     tip_get_freq_config_info_func((u8)dev_num, frequency,
                                                   &freq_config_info));

        for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
                VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
                CHECK_STATUS(ddr3_tip_bus_read_modify_write
                             (dev_num, access_type, if_id, bus_cnt,
                              DDR_PHY_DATA, ADLL_CFG0_PHY_REG,
                              freq_config_info.bw_per_freq << 8, 0x700));
                CHECK_STATUS(ddr3_tip_bus_read_modify_write
                             (dev_num, access_type, if_id, bus_cnt,
                              DDR_PHY_DATA, ADLL_CFG2_PHY_REG,
                              freq_config_info.rate_per_freq, 0x7));
        }

        for (bus_cnt = 0; bus_cnt < DDR_IF_CTRL_SUBPHYS_NUM; bus_cnt++) {
                CHECK_STATUS(ddr3_tip_bus_read_modify_write
                             (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_cnt,
                              DDR_PHY_CONTROL, ADLL_CFG0_PHY_REG,
                              freq_config_info.bw_per_freq << 8, 0x700));
                CHECK_STATUS(ddr3_tip_bus_read_modify_write
                             (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_cnt,
                              DDR_PHY_CONTROL, ADLL_CFG2_PHY_REG,
                              freq_config_info.rate_per_freq, 0x7));
        }

        /* DUnit to Phy drive post edge, ADLL reset assert -> de-assert */
        CHECK_STATUS(ddr3_tip_if_write
                     (dev_num, access_type, if_id, DRAM_PHY_CFG_REG,
                      0, (0x80000000 | 0x40000000)));
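        /*
         * Settle delay before de-asserting the ADLL reset; note that with
         * integer division this expression likely evaluates to 0 ms (i.e. a
         * minimal delay) for typical MHz-range frequency values.
         */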
1174        mdelay(100 / (mv_ddr_freq_get(frequency)) / mv_ddr_freq_get(MV_DDR_FREQ_LOW_FREQ));
1175        CHECK_STATUS(ddr3_tip_if_write
1176                     (dev_num, access_type, if_id, DRAM_PHY_CFG_REG,
1177                      (0x80000000 | 0x40000000), (0x80000000 | 0x40000000)));
1178
1179        /* polling for ADLL Done */
1180        if (ddr3_tip_if_polling(dev_num, access_type, if_id,
1181                                0x3ff03ff, 0x3ff03ff, PHY_LOCK_STATUS_REG,
1182                                MAX_POLLING_ITERATIONS) != MV_OK) {
1183                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1184                                  ("Freq_set: DDR3 poll failed(1)"));
1185        }
1186
1187        /* pup data_pup reset assert-> deassert */
1188        CHECK_STATUS(ddr3_tip_if_write
1189                     (dev_num, access_type, if_id, SDRAM_CFG_REG,
1190                      0, 0x60000000));
1191        mdelay(10);
1192        CHECK_STATUS(ddr3_tip_if_write
1193                     (dev_num, access_type, if_id, SDRAM_CFG_REG,
1194                      0x60000000, 0x60000000));
1195
1196        return MV_OK;
1197}
1198
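/*
 * Change the Dunit/DRAM operating frequency (DFS). The sequence below blocks
 * new transactions, optionally drops ODT for DLL-off operation, enters
 * self-refresh, reprograms the PLL divider and the CL/CWL/tWR fields, pulses
 * the divider and ADLL resets, rewrites the timing registers, exits
 * self-refresh, issues a refresh command, and finally re-issues MR0/MR2 so
 * the DRAM devices pick up the new CL/CWL.
 */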
1199int ddr3_tip_freq_set(u32 dev_num, enum hws_access_type access_type,
1200                      u32 if_id, enum mv_ddr_freq frequency)
1201{
1202        u32 cl_value = 0, cwl_value = 0, mem_mask = 0, val = 0,
1203                bus_cnt = 0, t_wr = 0, t_ckclk = 0,
1204                cnt_id;
1205        u32 end_if, start_if;
1206        u32 bus_index = 0;
1207        int is_dll_off = 0;
1208        enum mv_ddr_speed_bin speed_bin_index = 0;
1209        struct hws_tip_freq_config_info freq_config_info;
1210        enum hws_result *flow_result = training_result[training_stage];
1211        u32 adll_tap = 0;
1212        u32 cs_num;
1213        u32 t2t;
1214        u32 cs_mask[MAX_INTERFACE_NUM];
1215        u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1216        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1217        unsigned int tclk;
1218        enum mv_ddr_timing timing = tm->interface_params[if_id].timing;
1219        u32 freq = mv_ddr_freq_get(frequency);
1220
1221        DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1222                          ("dev %d access %d IF %d freq %d\n", dev_num,
1223                           access_type, if_id, frequency));
1224
1225        if (frequency == MV_DDR_FREQ_LOW_FREQ)
1226                is_dll_off = 1;
1227        if (access_type == ACCESS_TYPE_MULTICAST) {
1228                start_if = 0;
1229                end_if = MAX_INTERFACE_NUM - 1;
1230        } else {
1231                start_if = if_id;
1232                end_if = if_id;
1233        }
1234
1235        /* calculate interface cs mask - Oferb 4/11 */
1237        for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1238                /* cs enable is active low */
1239                VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1240                cs_mask[if_id] = CS_BIT_MASK;
1241                training_result[training_stage][if_id] = TEST_SUCCESS;
1242                ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs,
1243                                      &cs_mask[if_id]);
1244        }
1245
1246        /* speed bin can be different for each interface */
1247        /*
1248         * TODO (moti b): remove this loop for the multicast access
1249         * functions and loop over the unicast access functions instead
1250         */
1251        for (if_id = start_if; if_id <= end_if; if_id++) {
1252                VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1253
1254                flow_result[if_id] = TEST_SUCCESS;
1255                speed_bin_index =
1256                        tm->interface_params[if_id].speed_bin_index;
1257                if (tm->interface_params[if_id].memory_freq ==
1258                    frequency) {
1259                        cl_value =
1260                                tm->interface_params[if_id].cas_l;
1261                        cwl_value =
1262                                tm->interface_params[if_id].cas_wl;
1263                } else if (tm->cfg_src == MV_DDR_CFG_SPD) {
1264                        tclk = 1000000 / freq;
1265                        cl_value = mv_ddr_cl_calc(tm->timing_data[MV_DDR_TAA_MIN], tclk);
1266                        if (cl_value == 0) {
1267                                printf("mv_ddr: unsupported cas latency value found\n");
1268                                return MV_FAIL;
1269                        }
1270                        cwl_value = mv_ddr_cwl_calc(tclk);
1271                        if (cwl_value == 0) {
1272                                printf("mv_ddr: unsupported cas write latency value found\n");
1273                                return MV_FAIL;
1274                        }
1275                } else {
1276                        cl_value = mv_ddr_cl_val_get(speed_bin_index, frequency);
1277                        cwl_value = mv_ddr_cwl_val_get(speed_bin_index, frequency);
1278                }
1279
1280                DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1281                                  ("Freq_set dev 0x%x access 0x%x if 0x%x freq 0x%x speed %d:\n\t",
1282                                   dev_num, access_type, if_id,
1283                                   frequency, speed_bin_index));
1284
1285                for (cnt_id = 0; cnt_id < MV_DDR_FREQ_LAST; cnt_id++) {
1286                        DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1287                                          ("%d ", mv_ddr_cl_val_get(speed_bin_index, cnt_id)));
1288                }
1289
1290                DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, ("\n"));
1291                mem_mask = 0;
1292                for (bus_index = 0; bus_index < octets_per_if_num;
1293                     bus_index++) {
1294                        VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index);
1295                        mem_mask |=
1296                                tm->interface_params[if_id].
1297                                as_bus_params[bus_index].mirror_enable_bitmask;
1298                }
1299
1300                if (mem_mask != 0) {
1301                        /* moti b: redundant in KW28 */
1302                        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1303                                                       if_id,
1304                                                       DUAL_DUNIT_CFG_REG, 0, 0x8));
1305                }
1306
1307                /* dll state after exiting SR */
1308                if (is_dll_off == 1) {
1309                        CHECK_STATUS(ddr3_tip_if_write
1310                                     (dev_num, access_type, if_id,
1311                                      DFS_REG, 0x1, 0x1));
1312                } else {
1313                        CHECK_STATUS(ddr3_tip_if_write
1314                                     (dev_num, access_type, if_id,
1315                                      DFS_REG, 0, 0x1));
1316                }
1317
1318                CHECK_STATUS(ddr3_tip_if_write
1319                             (dev_num, access_type, if_id,
1320                              DUNIT_MMASK_REG, 0, 0x1));
1321                /* DFS  - block  transactions */
1322                CHECK_STATUS(ddr3_tip_if_write
1323                             (dev_num, access_type, if_id,
1324                              DFS_REG, 0x2, 0x2));
1325
1326                /* disable ODT in case of dll off */
1327                if (is_dll_off == 1) {
1328                        CHECK_STATUS(ddr3_tip_if_write
1329                                     (dev_num, access_type, if_id,
1330                                      0x1874, 0, 0x244));
1331                        CHECK_STATUS(ddr3_tip_if_write
1332                                     (dev_num, access_type, if_id,
1333                                      0x1884, 0, 0x244));
1334                        CHECK_STATUS(ddr3_tip_if_write
1335                                     (dev_num, access_type, if_id,
1336                                      0x1894, 0, 0x244));
1337                        CHECK_STATUS(ddr3_tip_if_write
1338                                     (dev_num, access_type, if_id,
1339                                      0x18a4, 0, 0x244));
1340                }
1341
1342                /* DFS  - Enter Self-Refresh */
1343                CHECK_STATUS(ddr3_tip_if_write
1344                             (dev_num, access_type, if_id, DFS_REG, 0x4,
1345                              0x4));
1346                /* polling on self refresh entry */
1347                if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST,
1348                                        if_id, 0x8, 0x8, DFS_REG,
1349                                        MAX_POLLING_ITERATIONS) != MV_OK) {
1350                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1351                                          ("Freq_set: DDR3 poll failed on SR entry\n"));
1352                }
1353
1354                /* Calculate 2T mode */
1355                if (mode_2t != 0xff) {
1356                        t2t = mode_2t;
1357                } else if (timing != MV_DDR_TIM_DEFAULT) {
1358                        t2t = (timing == MV_DDR_TIM_2T) ? 1 : 0;
1359                } else {
1360                        /* Calculate number of CS per interface */
1361                        cs_num = mv_ddr_cs_num_get();
1362                        t2t = (cs_num == 1) ? 0 : 1;
1363                }
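                /*
                 * 2T command timing holds each command on the bus for two
                 * clocks; it is generally only needed when more than one
                 * chip-select loads the command/address bus, hence the 1T
                 * fallback for a single-cs configuration above.
                 */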
1364
1365
1366                if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_INTERLEAVE_WA) == 1) {
1367                        /* Use 1T mode if 1:1 ratio configured */
1368                        if (config_func_info[dev_num].tip_get_clock_ratio(frequency) == 1) {
1369                                /* Low freq */
1370                                CHECK_STATUS(ddr3_tip_if_write
1371                                             (dev_num, access_type, if_id,
1372                                              SDRAM_OPEN_PAGES_CTRL_REG, 0x0, 0x3C0));
1373                                t2t = 0;
1374                        } else {
1375                                /* Middle or target freq */
1376                                CHECK_STATUS(ddr3_tip_if_write
1377                                             (dev_num, access_type, if_id,
1378                                              SDRAM_OPEN_PAGES_CTRL_REG, 0x3C0, 0x3C0));
1379                        }
1380                }
1381                CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1382                                               DUNIT_CTRL_LOW_REG, t2t << 3, 0x3 << 3));
1383
1384                /* PLL configuration */
1385                config_func_info[dev_num].tip_set_freq_divider_func(dev_num, if_id,
1386                                                                    frequency);
1387
1388                /* DFS  - CL/CWL/WR parameters after exiting SR */
1389                CHECK_STATUS(ddr3_tip_if_write
1390                             (dev_num, access_type, if_id, DFS_REG,
1391                              (cl_mask_table[cl_value] << 8), 0xf00));
1392                CHECK_STATUS(ddr3_tip_if_write
1393                             (dev_num, access_type, if_id, DFS_REG,
1394                              (cwl_mask_table[cwl_value] << 12), 0x7000));
1395
1396                t_ckclk = (MEGA / freq);
1397                t_wr = time_to_nclk(mv_ddr_speed_bin_timing_get
1398                                           (speed_bin_index,
1399                                            SPEED_BIN_TWR), t_ckclk);
1400
1401                CHECK_STATUS(ddr3_tip_if_write
1402                             (dev_num, access_type, if_id, DFS_REG,
1403                              (twr_mask_table[t_wr] << 16), 0x70000));
1404
1405                /* Restore original RTT values if returning from DLL OFF mode */
1406                if (is_dll_off == 1) {
1407                        CHECK_STATUS(ddr3_tip_if_write
1408                                     (dev_num, access_type, if_id, 0x1874,
1409                                      g_dic | g_rtt_nom, 0x266));
1410                        CHECK_STATUS(ddr3_tip_if_write
1411                                     (dev_num, access_type, if_id, 0x1884,
1412                                      g_dic | g_rtt_nom, 0x266));
1413                        CHECK_STATUS(ddr3_tip_if_write
1414                                     (dev_num, access_type, if_id, 0x1894,
1415                                      g_dic | g_rtt_nom, 0x266));
1416                        CHECK_STATUS(ddr3_tip_if_write
1417                                     (dev_num, access_type, if_id, 0x18a4,
1418                                      g_dic | g_rtt_nom, 0x266));
1419                }
1420
1421                /* Reset divider_b assert -> de-assert */
1422                CHECK_STATUS(ddr3_tip_if_write
1423                             (dev_num, access_type, if_id,
1424                              SDRAM_CFG_REG, 0, 0x10000000));
1425                mdelay(10);
1426                CHECK_STATUS(ddr3_tip_if_write
1427                             (dev_num, access_type, if_id,
1428                              SDRAM_CFG_REG, 0x10000000, 0x10000000));
1429
1430                /* ADLL configuration function of process and frequency */
1431                CHECK_STATUS(config_func_info[dev_num].
1432                             tip_get_freq_config_info_func(dev_num, frequency,
1433                                                           &freq_config_info));
1434
1435                /* TBD check milo5 using device ID ? */
1436                for (bus_cnt = 0; bus_cnt < octets_per_if_num;
1437                     bus_cnt++) {
1438                        VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
1439                        CHECK_STATUS(ddr3_tip_bus_read_modify_write
1440                                     (dev_num, ACCESS_TYPE_UNICAST,
1441                                      if_id, bus_cnt, DDR_PHY_DATA,
1442                                      ADLL_CFG0_PHY_REG,
1443                                      freq_config_info.bw_per_freq << 8,
1444                                      0x700));
1447                        CHECK_STATUS(ddr3_tip_bus_read_modify_write
1448                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1449                                      bus_cnt, DDR_PHY_DATA, ADLL_CFG2_PHY_REG,
1450                                      freq_config_info.rate_per_freq, 0x7));
1451                }
1452
1453                /* Dunit to PHY drive post edge, ADLL reset assert -> de-assert */
1454                CHECK_STATUS(ddr3_tip_if_write
1455                             (dev_num, access_type, if_id,
1456                              DRAM_PHY_CFG_REG, 0,
1457                              (0x80000000 | 0x40000000)));
1458                mdelay(100 / (freq / mv_ddr_freq_get(MV_DDR_FREQ_LOW_FREQ)));
1459                CHECK_STATUS(ddr3_tip_if_write
1460                             (dev_num, access_type, if_id,
1461                              DRAM_PHY_CFG_REG, (0x80000000 | 0x40000000),
1462                              (0x80000000 | 0x40000000)));
1463
1464                /* polling for ADLL Done */
1465                if (ddr3_tip_if_polling
1466                    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x3ff03ff,
1467                     0x3ff03ff, PHY_LOCK_STATUS_REG,
1468                     MAX_POLLING_ITERATIONS) != MV_OK) {
1469                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1470                                          ("Freq_set: DDR3 poll failed(1)\n"));
1471                }
1472
1473                /* pup data_pup reset assert-> deassert */
1474                CHECK_STATUS(ddr3_tip_if_write
1475                             (dev_num, access_type, if_id,
1476                              SDRAM_CFG_REG, 0, 0x60000000));
1477                mdelay(10);
1478                CHECK_STATUS(ddr3_tip_if_write
1479                             (dev_num, access_type, if_id,
1480                              SDRAM_CFG_REG, 0x60000000, 0x60000000));
1481
1482                /* Set proper timing params before exiting Self-Refresh */
1483                ddr3_tip_set_timing(dev_num, access_type, if_id, frequency);
1484                if (delay_enable != 0) {
1485                        adll_tap = (is_dll_off == 1) ? 1000 : (MEGA / (freq * 64));
1486                        ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
1487                }
1488
1489                /* Exit SR */
1490                CHECK_STATUS(ddr3_tip_if_write
1491                             (dev_num, access_type, if_id, DFS_REG, 0,
1492                              0x4));
1493                if (ddr3_tip_if_polling
1494                    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x8, DFS_REG,
1495                     MAX_POLLING_ITERATIONS) != MV_OK) {
1496                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1497                                          ("Freq_set: DDR3 poll failed(2)\n"));
1498                }
1499
1500                /* Refresh Command */
1501                CHECK_STATUS(ddr3_tip_if_write
1502                             (dev_num, access_type, if_id,
1503                              SDRAM_OP_REG, 0x2, 0xf1f));
1504                if (ddr3_tip_if_polling
1505                    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
1506                     SDRAM_OP_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
1507                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1508                                          ("Freq_set: DDR3 poll failed(3)\n"));
1509                }
1510
1511                /* Release DFS Block */
1512                CHECK_STATUS(ddr3_tip_if_write
1513                             (dev_num, access_type, if_id, DFS_REG, 0,
1514                              0x2));
1515                /* Controller to MBUS Retry - normal */
1516                CHECK_STATUS(ddr3_tip_if_write
1517                             (dev_num, access_type, if_id, DUNIT_MMASK_REG,
1518                              0x1, 0x1));
1519
1520                /* MR0: Burst Length 8, CL, Auto_precharge (reg 0x16cc) */
1521                val =
1522                        ((cl_mask_table[cl_value] & 0x1) << 2) |
1523                        ((cl_mask_table[cl_value] & 0xe) << 3);
1524                CHECK_STATUS(ddr3_tip_if_write
1525                             (dev_num, access_type, if_id, MR0_REG,
1526                              val, (0x7 << 4) | (1 << 2)));
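                /*
                 * per the ddr3 MR0 layout the cas latency code is split:
                 * bit A2 carries its lsb and bits A6:A4 the upper bits,
                 * which is what the (& 0x1) << 2 / (& 0xe) << 3 shuffle
                 * above implements
                 */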
1527                /* MR2: CWL and Rtt_WR, Auto Self-Refresh - disable */
1528                val = (cwl_mask_table[cwl_value] << 3) | g_rtt_wr;
1529                /*
1530                 * nklein 24.10.13 - should not be here - leave value as set in
1531                 * the init configuration val |= (1 << 9);
1532                 * val |= ((tm->interface_params[if_id].
1533                 * interface_temp == MV_DDR_TEMP_HIGH) ? (1 << 7) : 0);
1534                 */
1535                /* nklein 24.10.13 - see above comment */
1536                CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1537                                               if_id, MR2_REG,
1538                                               val, (0x7 << 3) | (0x3 << 9)));
1539
1540                /* ODT TIMING */
1541                val = ((cl_value - cwl_value + 1) << 4) |
1542                        ((cl_value - cwl_value + 6) << 8) |
1543                        ((cl_value - 1) << 12) | ((cl_value + 6) << 16);
1544                CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1545                                               if_id, DDR_ODT_TIMING_LOW_REG,
1546                                               val, 0xffff0));
1547                val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
1548                CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1549                                               if_id, DDR_ODT_TIMING_HIGH_REG,
1550                                               val, 0xffff));
1551
1552                /* in case of ddr4, the receiver odt must be always 'on' (odt_config = '0');
1553                 * in case of ddr3, configure the odt through the timing registers
1554                 */
1555                if (odt_config != 0) {
1556                        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1557                                                       DUNIT_ODT_CTRL_REG, 0xf, 0xf));
1558                } else {
1559                        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1560                                                       DUNIT_ODT_CTRL_REG, 0x30f, 0x30f));
1561                }
1562
1563                /* re-write CL */
1564                val = ((cl_mask_table[cl_value] & 0x1) << 2) |
1565                        ((cl_mask_table[cl_value] & 0xe) << 3);
1566
1569                CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MR_CMD0,
1570                        val, (0x7 << 4) | (0x1 << 2)));
1571
1572                /* re-write CWL */
1573                val = (cwl_mask_table[cwl_value] << 3) | g_rtt_wr;
1574                CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MR_CMD2,
1575                        val, (0x7 << 3) | (0x3 << 9)));
1576
1577                if (mem_mask != 0) {
1578                        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1579                                                       if_id,
1580                                                       DUAL_DUNIT_CFG_REG,
1581                                                       1 << 3, 0x8));
1582                }
1583        }
1584
1585        return MV_OK;
1586}
1587
1588/*
1589 * Set ODT values
1590 */
1591static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
1592                              u32 if_id, u32 cl_value, u32 cwl_value)
1593{
1594        /* ODT TIMING */
1595        u32 val = (cl_value - cwl_value + 6);
1596
1597        val = ((cl_value - cwl_value + 1) << 4) | ((val & 0xf) << 8) |
1598                (((cl_value - 1) & 0xf) << 12) |
1599                (((cl_value + 6) & 0xf) << 16) | (((val & 0x10) >> 4) << 21);
1600        val |= (((cl_value - 1) >> 4) << 22) | (((cl_value + 6) >> 4) << 23);
1601
1602        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1603                                       DDR_ODT_TIMING_LOW_REG, val, 0xffff0));
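        /*
         * worked example with illustrative CL = 11, CWL = 8: the windows
         * packed above are CL - CWL + 1 = 4, CL - CWL + 6 = 9, CL - 1 = 10
         * and CL + 6 = 17 clocks
         */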
1604        val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
1605        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1606                                       DDR_ODT_TIMING_HIGH_REG, val, 0xffff));
1607        if (odt_additional == 1) {
1608                CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1609                                               if_id,
1610                                               SDRAM_ODT_CTRL_HIGH_REG,
1611                                               0xf, 0xf));
1612        }
1613
1614        /* ODT Active */
1615        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1616                                       DUNIT_ODT_CTRL_REG, 0xf, 0xf));
1617
1618        return MV_OK;
1619}
1620
1621/*
1622 * Set Timing values for training
1623 */
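/*
 * All speed-bin values are handled in picoseconds below: t_ckclk = MEGA/freq
 * is the clock period in ps for freq in MHz (e.g. 1250 ps at 800 MHz), and
 * time_to_nclk() converts a ps value into clock cycles before it is packed
 * into the Dunit timing registers.
 */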
1624static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
1625                               u32 if_id, enum mv_ddr_freq frequency)
1626{
1627        u32 t_ckclk = 0, t_ras = 0;
1628        u32 t_rcd = 0, t_rp = 0, t_wr = 0, t_wtr = 0, t_rrd = 0, t_rtp = 0,
1629                t_rfc = 0, t_mod = 0, t_r2r = 0x3, t_r2r_high = 0,
1630                t_r2w_w2r = 0x3, t_r2w_w2r_high = 0x1, t_w2w = 0x3;
1631        u32 refresh_interval_cnt, t_hclk, t_refi, t_faw, t_pd, t_xpdll;
1632        u32 val = 0, page_size = 0, mask = 0;
1633        enum mv_ddr_speed_bin speed_bin_index;
1634        enum mv_ddr_die_capacity memory_size = MV_DDR_DIE_CAP_2GBIT;
1635        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1636        u32 freq = mv_ddr_freq_get(frequency);
1637
1638        speed_bin_index = tm->interface_params[if_id].speed_bin_index;
1639        memory_size = tm->interface_params[if_id].memory_size;
1640        page_size = mv_ddr_page_size_get(tm->interface_params[if_id].bus_width, memory_size);
1641        t_ckclk = (MEGA / freq);
1642        /* HCLK in [ps] */
1643        t_hclk = MEGA / (freq / config_func_info[dev_num].tip_get_clock_ratio(frequency));
1644
1645        t_refi = (tm->interface_params[if_id].interface_temp == MV_DDR_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW;
1646        t_refi *= 1000; /* psec */
1647        refresh_interval_cnt = t_refi / t_hclk; /* no units */
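        /* e.g. a 7.8 us tREFI with a 5 ns HCLK yields a count of 1560 */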
1648
1649        if (page_size == 1) {
1650                t_faw = mv_ddr_speed_bin_timing_get(speed_bin_index, SPEED_BIN_TFAW1K);
1651                t_faw = time_to_nclk(t_faw, t_ckclk);
1652                t_faw = GET_MAX_VALUE(20, t_faw);
1653        } else {        /* page size = 2; page size 0.5k is not supported */
1654                t_faw = mv_ddr_speed_bin_timing_get(speed_bin_index, SPEED_BIN_TFAW2K);
1655                t_faw = time_to_nclk(t_faw, t_ckclk);
1656                t_faw = GET_MAX_VALUE(28, t_faw);
1657        }
1658
1659        t_pd = GET_MAX_VALUE(t_ckclk * 3, mv_ddr_speed_bin_timing_get(speed_bin_index, SPEED_BIN_TPD));
1660        t_pd = time_to_nclk(t_pd, t_ckclk);
1661
1662        t_xpdll = GET_MAX_VALUE(t_ckclk * 10, mv_ddr_speed_bin_timing_get(speed_bin_index, SPEED_BIN_TXPDLL));
1663        t_xpdll = time_to_nclk(t_xpdll, t_ckclk);
1664
1665        t_rrd = (page_size == 1) ? mv_ddr_speed_bin_timing_get(speed_bin_index,
1666                                                   SPEED_BIN_TRRD1K) :
1667                mv_ddr_speed_bin_timing_get(speed_bin_index, SPEED_BIN_TRRD2K);
1668        t_rrd = GET_MAX_VALUE(t_ckclk * 4, t_rrd);
1669        t_rtp = GET_MAX_VALUE(t_ckclk * 4, mv_ddr_speed_bin_timing_get(speed_bin_index,
1670                                                           SPEED_BIN_TRTP));
1671        t_mod = GET_MAX_VALUE(t_ckclk * 12, 15000);
1672        t_wtr = GET_MAX_VALUE(t_ckclk * 4, mv_ddr_speed_bin_timing_get(speed_bin_index,
1673                                                           SPEED_BIN_TWTR));
1674        t_ras = time_to_nclk(mv_ddr_speed_bin_timing_get(speed_bin_index,
1675                                                    SPEED_BIN_TRAS),
1676                                    t_ckclk);
1677        t_rcd = time_to_nclk(mv_ddr_speed_bin_timing_get(speed_bin_index,
1678                                                    SPEED_BIN_TRCD),
1679                                    t_ckclk);
1680        t_rp = time_to_nclk(mv_ddr_speed_bin_timing_get(speed_bin_index,
1681                                                   SPEED_BIN_TRP),
1682                                   t_ckclk);
1683        t_wr = time_to_nclk(mv_ddr_speed_bin_timing_get(speed_bin_index,
1684                                                   SPEED_BIN_TWR),
1685                                   t_ckclk);
1686        t_wtr = time_to_nclk(t_wtr, t_ckclk);
1687        t_rrd = time_to_nclk(t_rrd, t_ckclk);
1688        t_rtp = time_to_nclk(t_rtp, t_ckclk);
1689        t_rfc = time_to_nclk(mv_ddr_rfc_get(memory_size) * 1000, t_ckclk);
1690        t_mod = time_to_nclk(t_mod, t_ckclk);
1691
1692        /* SDRAM Timing Low */
1693        val = (((t_ras - 1) & SDRAM_TIMING_LOW_TRAS_MASK) << SDRAM_TIMING_LOW_TRAS_OFFS) |
1694              (((t_rcd - 1) & SDRAM_TIMING_LOW_TRCD_MASK) << SDRAM_TIMING_LOW_TRCD_OFFS) |
1695              (((t_rcd - 1) >> SDRAM_TIMING_LOW_TRCD_OFFS & SDRAM_TIMING_HIGH_TRCD_MASK)
1696              << SDRAM_TIMING_HIGH_TRCD_OFFS) |
1697              (((t_rp - 1) & SDRAM_TIMING_LOW_TRP_MASK) << SDRAM_TIMING_LOW_TRP_OFFS) |
1698              ((((t_rp - 1) >> 4) & SDRAM_TIMING_HIGH_TRP_MASK)
1699              << SDRAM_TIMING_HIGH_TRP_OFFS) |
1700              (((t_wr - 1) & SDRAM_TIMING_LOW_TWR_MASK) << SDRAM_TIMING_LOW_TWR_OFFS) |
1701              (((t_wtr - 1) & SDRAM_TIMING_LOW_TWTR_MASK) << SDRAM_TIMING_LOW_TWTR_OFFS) |
1702              ((((t_ras - 1) >> 4) & SDRAM_TIMING_LOW_TRAS_HIGH_MASK) << SDRAM_TIMING_LOW_TRAS_HIGH_OFFS) |
1703              (((t_rrd - 1) & SDRAM_TIMING_LOW_TRRD_MASK) << SDRAM_TIMING_LOW_TRRD_OFFS) |
1704              (((t_rtp - 1) & SDRAM_TIMING_LOW_TRTP_MASK) << SDRAM_TIMING_LOW_TRTP_OFFS);
1705
1706        mask = (SDRAM_TIMING_LOW_TRAS_MASK << SDRAM_TIMING_LOW_TRAS_OFFS) |
1707               (SDRAM_TIMING_LOW_TRCD_MASK << SDRAM_TIMING_LOW_TRCD_OFFS) |
1708               (SDRAM_TIMING_HIGH_TRCD_MASK << SDRAM_TIMING_HIGH_TRCD_OFFS) |
1709               (SDRAM_TIMING_LOW_TRP_MASK << SDRAM_TIMING_LOW_TRP_OFFS) |
1710               (SDRAM_TIMING_HIGH_TRP_MASK << SDRAM_TIMING_HIGH_TRP_OFFS) |
1711               (SDRAM_TIMING_LOW_TWR_MASK << SDRAM_TIMING_LOW_TWR_OFFS) |
1712               (SDRAM_TIMING_LOW_TWTR_MASK << SDRAM_TIMING_LOW_TWTR_OFFS) |
1713               (SDRAM_TIMING_LOW_TRAS_HIGH_MASK << SDRAM_TIMING_LOW_TRAS_HIGH_OFFS) |
1714               (SDRAM_TIMING_LOW_TRRD_MASK << SDRAM_TIMING_LOW_TRRD_OFFS) |
1715               (SDRAM_TIMING_LOW_TRTP_MASK << SDRAM_TIMING_LOW_TRTP_OFFS);
1716
1717        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1718                                       SDRAM_TIMING_LOW_REG, val, mask));
1719
1720        /* SDRAM Timing High */
1721        val = 0;
1722        mask = 0;
1723
1724        val = (((t_rfc - 1) & SDRAM_TIMING_HIGH_TRFC_MASK) << SDRAM_TIMING_HIGH_TRFC_OFFS) |
1725              ((t_r2r & SDRAM_TIMING_HIGH_TR2R_MASK) << SDRAM_TIMING_HIGH_TR2R_OFFS) |
1726              ((t_r2w_w2r & SDRAM_TIMING_HIGH_TR2W_W2R_MASK) << SDRAM_TIMING_HIGH_TR2W_W2R_OFFS) |
1727              ((t_w2w & SDRAM_TIMING_HIGH_TW2W_MASK) << SDRAM_TIMING_HIGH_TW2W_OFFS) |
1728              ((((t_rfc - 1) >> 7) & SDRAM_TIMING_HIGH_TRFC_HIGH_MASK) << SDRAM_TIMING_HIGH_TRFC_HIGH_OFFS) |
1729              ((t_r2r_high & SDRAM_TIMING_HIGH_TR2R_HIGH_MASK) << SDRAM_TIMING_HIGH_TR2R_HIGH_OFFS) |
1730              ((t_r2w_w2r_high & SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_MASK) << SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_OFFS) |
1731              (((t_mod - 1) & SDRAM_TIMING_HIGH_TMOD_MASK) << SDRAM_TIMING_HIGH_TMOD_OFFS) |
1732              ((((t_mod - 1) >> 4) & SDRAM_TIMING_HIGH_TMOD_HIGH_MASK) << SDRAM_TIMING_HIGH_TMOD_HIGH_OFFS);
1733
1734        mask = (SDRAM_TIMING_HIGH_TRFC_MASK << SDRAM_TIMING_HIGH_TRFC_OFFS) |
1735               (SDRAM_TIMING_HIGH_TR2R_MASK << SDRAM_TIMING_HIGH_TR2R_OFFS) |
1736               (SDRAM_TIMING_HIGH_TR2W_W2R_MASK << SDRAM_TIMING_HIGH_TR2W_W2R_OFFS) |
1737               (SDRAM_TIMING_HIGH_TW2W_MASK << SDRAM_TIMING_HIGH_TW2W_OFFS) |
1738               (SDRAM_TIMING_HIGH_TRFC_HIGH_MASK << SDRAM_TIMING_HIGH_TRFC_HIGH_OFFS) |
1739               (SDRAM_TIMING_HIGH_TR2R_HIGH_MASK << SDRAM_TIMING_HIGH_TR2R_HIGH_OFFS) |
1740               (SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_MASK << SDRAM_TIMING_HIGH_TR2W_W2R_HIGH_OFFS) |
1741               (SDRAM_TIMING_HIGH_TMOD_MASK << SDRAM_TIMING_HIGH_TMOD_OFFS) |
1742               (SDRAM_TIMING_HIGH_TMOD_HIGH_MASK << SDRAM_TIMING_HIGH_TMOD_HIGH_OFFS);
1743
1744        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1745                                       SDRAM_TIMING_HIGH_REG, val, mask));
1746
1747        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1748                                       SDRAM_CFG_REG,
1749                                       refresh_interval_cnt << REFRESH_OFFS,
1750                                       REFRESH_MASK << REFRESH_OFFS));
1751        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1752                                       SDRAM_ADDR_CTRL_REG, (t_faw - 1) << T_FAW_OFFS,
1753                                       T_FAW_MASK << T_FAW_OFFS));
1754
1755        CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, DDR_TIMING_REG,
1756                                       (t_pd - 1) << DDR_TIMING_TPD_OFFS |
1757                                       (t_xpdll - 1) << DDR_TIMING_TXPDLL_OFFS,
1758                                       DDR_TIMING_TPD_MASK << DDR_TIMING_TPD_OFFS |
1759                                       DDR_TIMING_TXPDLL_MASK << DDR_TIMING_TXPDLL_OFFS));
1760
1761
1762        return MV_OK;
1763}
1764
1765
1766/*
1767 * Write CS Result
1768 */
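/*
 * Propagate the value just trained for effective_cs into the register slots
 * of every other chip-select on the same phy, so ranks that were not
 * explicitly trained inherit the trained result.
 */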
1769int ddr3_tip_write_cs_result(u32 dev_num, u32 offset)
1770{
1771        u32 if_id, bus_num, cs_bitmask, data_val, cs_num;
1772        u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1773        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1774
1775        for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1776                VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1777                for (bus_num = 0; bus_num < octets_per_if_num;
1778                     bus_num++) {
1779                        VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num);
1780                        cs_bitmask =
1781                                tm->interface_params[if_id].
1782                                as_bus_params[bus_num].cs_bitmask;
1783                        if (cs_bitmask != effective_cs) {
1784                                cs_num = GET_CS_FROM_MASK(cs_bitmask);
1785                                ddr3_tip_bus_read(dev_num, if_id,
1786                                                  ACCESS_TYPE_UNICAST, bus_num,
1787                                                  DDR_PHY_DATA,
1788                                                  offset +
1789                                                  (effective_cs * 0x4),
1790                                                  &data_val);
1791                                ddr3_tip_bus_write(dev_num,
1792                                                   ACCESS_TYPE_UNICAST,
1793                                                   if_id,
1794                                                   ACCESS_TYPE_UNICAST,
1795                                                   bus_num, DDR_PHY_DATA,
1796                                                   offset +
1797                                                   (cs_num * 0x4),
1798                                                   data_val);
1799                        }
1800                }
1801        }
1802
1803        return MV_OK;
1804}
1805
1806/*
1807 * Write MRS
1808 */
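/*
 * Stage the MR data in the matching shadow register, then trigger the MRS
 * command per interface through SDRAM_OP_REG with the target chip-selects
 * in bits [11:8] (cs enable is active low), and poll the command field back
 * to zero for completion.
 */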
1809int ddr3_tip_write_mrs_cmd(u32 dev_num, u32 *cs_mask_arr, enum mr_number mr_num, u32 data, u32 mask)
1810{
1811        u32 if_id;
1812        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1813
1814        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1815                                       PARAM_NOT_CARE, mr_data[mr_num].reg_addr, data, mask));
1816        for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1817                VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1818                CHECK_STATUS(ddr3_tip_if_write
1819                             (dev_num, ACCESS_TYPE_UNICAST, if_id,
1820                              SDRAM_OP_REG,
1821                              (cs_mask_arr[if_id] << 8) | mr_data[mr_num].cmd, 0xf1f));
1822        }
1823
1824        for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1825                VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1826                if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
1827                                        0x1f, SDRAM_OP_REG,
1828                                        MAX_POLLING_ITERATIONS) != MV_OK) {
1829                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1830                                          ("write_mrs_cmd: Poll cmd fail\n"));
1831                }
1832        }
1833
1834        return MV_OK;
1835}
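/*
 * Usage sketch (mirroring the calls at the end of ddr3_tip_freq_set()):
 *
 *      u32 cs_mask[MAX_INTERFACE_NUM];
 *      ...
 *      CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MR_CMD0,
 *                                          val, (0x7 << 4) | (0x1 << 2)));
 */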
1836
1837/*
1838 * Reset XSB Read FIFO
1839 */
1840int ddr3_tip_reset_fifo_ptr(u32 dev_num)
1841{
1842        u32 if_id = 0;
1843
1844        /* Configure PHY reset value to 0 in order to "clean" the FIFO */
1845        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1846                                       if_id, 0x15c8, 0, 0xff000000));
1847        /*
1848         * Move PHY to RL mode (only in RL mode the PHY overrides FIFO values
1849         * during FIFO reset)
1850         */
1851        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1852                                       if_id, TRAINING_SW_2_REG,
1853                                       0x1, 0x9));
1854        /* Ensure the configuration above takes effect in the PHY */
1855        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1856                                       if_id, 0x15b0,
1857                                       0x80000000, 0x80000000));
1858        /* Assert the read FIFO reset */
1859        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1860                                       if_id, 0x1400, 0, 0x40000000));
1861        /* De-assert the read FIFO reset */
1862        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1863                                       if_id, 0x1400,
1864                                       0x40000000, 0x40000000));
1865        /* Move PHY back to functional mode */
1866        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1867                                       if_id, TRAINING_SW_2_REG,
1868                                       0x8, 0x9));
1869        /* Stop training machine */
1870        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1871                                       if_id, 0x15b4, 0x10000, 0x10000));
1872
1873        return MV_OK;
1874}
1875
1876/*
1877 * Reset Phy registers
1878 */
1879int ddr3_tip_ddr3_reset_phy_regs(u32 dev_num)
1880{
1881        u32 if_id, phy_id, cs;
1882        u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1883        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1884
1885        for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1886                VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1887                for (phy_id = 0; phy_id < octets_per_if_num;
1888                     phy_id++) {
1889                        VALIDATE_BUS_ACTIVE(tm->bus_act_mask, phy_id);
1890                        CHECK_STATUS(ddr3_tip_bus_write
1891                                     (dev_num, ACCESS_TYPE_UNICAST,
1892                                      if_id, ACCESS_TYPE_UNICAST,
1893                                      phy_id, DDR_PHY_DATA,
1894                                      WL_PHY_REG(effective_cs),
1895                                      phy_reg0_val));
1896                        CHECK_STATUS(ddr3_tip_bus_write
1897                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1898                                      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1899                                      RL_PHY_REG(effective_cs),
1900                                      phy_reg2_val));
1901                        CHECK_STATUS(ddr3_tip_bus_write
1902                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1903                                      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1904                                      CRX_PHY_REG(effective_cs), phy_reg3_val));
1905                        CHECK_STATUS(ddr3_tip_bus_write
1906                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1907                                      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1908                                      CTX_PHY_REG(effective_cs), phy_reg1_val));
1909                        CHECK_STATUS(ddr3_tip_bus_write
1910                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1911                                      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1912                                      PBS_TX_BCAST_PHY_REG(effective_cs), 0x0));
1913                        CHECK_STATUS(ddr3_tip_bus_write
1914                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1915                                      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1916                                      PBS_RX_BCAST_PHY_REG(effective_cs), 0));
1917                        CHECK_STATUS(ddr3_tip_bus_write
1918                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1919                                      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1920                                      PBS_TX_PHY_REG(effective_cs, DQSP_PAD), 0));
1921                        CHECK_STATUS(ddr3_tip_bus_write
1922                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1923                                      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1924                                      PBS_RX_PHY_REG(effective_cs, DQSP_PAD), 0));
1925                        CHECK_STATUS(ddr3_tip_bus_write
1926                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1927                                      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1928                                      PBS_TX_PHY_REG(effective_cs, DQSN_PAD), 0));
1929                        CHECK_STATUS(ddr3_tip_bus_write
1930                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1931                                      ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1932                                      PBS_RX_PHY_REG(effective_cs, DQSN_PAD), 0));
1933                }
1934        }
1935
1936        /* Set Receiver Calibration value */
1937        for (cs = 0; cs < MAX_CS_NUM; cs++) {
1938                /* PHY register 0xdb bits[5:0] - configure to 63 */
1939                CHECK_STATUS(ddr3_tip_bus_write
1940                             (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1941                              ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1942                              DDR_PHY_DATA, VREF_BCAST_PHY_REG(cs), 63));
1943        }
1944
1945        return MV_OK;
1946}
1947
1948/*
1949 * Restore Dunit registers
1950 */
1951int ddr3_tip_restore_dunit_regs(u32 dev_num)
1952{
1953        u32 index_cnt;
1954
1955        mv_ddr_set_calib_controller();
1956
1957        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1958                                       PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG,
1959                                       0x1, 0x1));
1960        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1961                                       PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG,
1962                                       calibration_update_control << 3,
1963                                       0x3 << 3));
1964        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1965                                       PARAM_NOT_CARE,
1966                                       ODPG_WR_RD_MODE_ENA_REG,
1967                                       0xffff, MASK_ALL_BITS));
1968
1969        for (index_cnt = 0; index_cnt < ARRAY_SIZE(odpg_default_value);
1970             index_cnt++) {
1971                CHECK_STATUS(ddr3_tip_if_write
1972                             (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1973                              odpg_default_value[index_cnt].reg_addr,
1974                              odpg_default_value[index_cnt].reg_data,
1975                              odpg_default_value[index_cnt].reg_mask));
1976        }
1977
1978        return MV_OK;
1979}
1980
1981int ddr3_tip_adll_regs_bypass(u32 dev_num, u32 reg_val1, u32 reg_val2)
1982{
1983        u32 if_id, phy_id;
1984        u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1985        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1986
1987        for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1988                VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1989                for (phy_id = 0; phy_id < octets_per_if_num; phy_id++) {
1990                        VALIDATE_BUS_ACTIVE(tm->bus_act_mask, phy_id);
1991                        CHECK_STATUS(ddr3_tip_bus_write
1992                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1993                                     ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1994                                     CTX_PHY_REG(effective_cs), reg_val1));
1995                        CHECK_STATUS(ddr3_tip_bus_write
1996                                     (dev_num, ACCESS_TYPE_UNICAST, if_id,
1997                                     ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1998                                     PBS_TX_BCAST_PHY_REG(effective_cs), reg_val2));
1999                }
2000        }
2001
2002        return MV_OK;
2003}
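/*
 * Used around the low-frequency stage of the main flow below: the ADLLs are
 * first bypassed (reg_val1 = 0, reg_val2 = 0x1f) before switching to the low
 * frequency, then restored (reg_val1 = phy_reg1_val, reg_val2 = 0) once the
 * patterns have been loaded.
 */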
2004
2005/*
2006 * Auto tune main flow
2007 */
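/*
 * Walks the training stages in order, gated by the bits set in
 * mask_tune_func: controller init and ADLL calibration, low frequency plus
 * low-frequency write leveling, pattern load, medium frequency, write and
 * read leveling with the write-leveling supplement, per-cs PBS RX/TX, and
 * finally the switch to the target frequency with its own leveling stages.
 * Each stage dumps registers when is_reg_dump is set and aborts the flow on
 * error unless debug_mode is enabled.
 */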
2008static int ddr3_tip_ddr3_training_main_flow(u32 dev_num)
2009{
2010/* TODO: enable this functionality for other platforms */
2011        struct init_cntr_param init_cntr_prm;
2012        int ret = MV_OK;
2013        int adll_bypass_flag = 0;
2014        u32 if_id;
2015        unsigned int max_cs = mv_ddr_cs_num_get();
2016        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2017        enum mv_ddr_freq freq = tm->interface_params[0].memory_freq;
2018        unsigned int *freq_tbl = mv_ddr_freq_tbl_get();
2019
2020#ifdef DDR_VIEWER_TOOL
2021        if (debug_training == DEBUG_LEVEL_TRACE) {
2022                CHECK_STATUS(print_device_info((u8)dev_num));
2023        }
2024#endif
2025
2026        ddr3_tip_validate_algo_components(dev_num);
2027
2028        for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2029                CHECK_STATUS(ddr3_tip_ddr3_reset_phy_regs(dev_num));
2030        }
2031        /* Set to 0 after each loop to avoid an illegal value being used */
2032        effective_cs = 0;
2033
2034        freq_tbl[MV_DDR_FREQ_LOW_FREQ] = dfs_low_freq;
2035
2036        if (is_pll_before_init != 0) {
2037                for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
2038                        VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
2039                        config_func_info[dev_num].tip_set_freq_divider_func(
2040                                (u8)dev_num, if_id, freq);
2041                }
2042        }
2043
2044/* TODO: enable this functionality for other platforms */
2045        if (is_adll_calib_before_init != 0) {
2046                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2047                                  ("with adll calib before init\n"));
2048                adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq);
2049        }
2050
2051        if (is_reg_dump != 0) {
2052                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2053                                  ("Dump before init controller\n"));
2054                ddr3_tip_reg_dump(dev_num);
2055        }
2056
2057        if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
2058                training_stage = INIT_CONTROLLER;
2059                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2060                                  ("INIT_CONTROLLER_MASK_BIT\n"));
2061                init_cntr_prm.do_mrs_phy = 1;
2062                init_cntr_prm.is_ctrl64_bit = 0;
2063                init_cntr_prm.init_phy = 1;
2064                init_cntr_prm.msys_init = 0;
2065                ret = hws_ddr3_tip_init_controller(dev_num, &init_cntr_prm);
2066                if (is_reg_dump != 0)
2067                        ddr3_tip_reg_dump(dev_num);
2068                if (ret != MV_OK) {
2069                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2070                                          ("hws_ddr3_tip_init_controller failure\n"));
2071                        if (debug_mode == 0)
2072                                return MV_FAIL;
2073                }
2074        }
2075
2076        ret = adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq);
2077        if (ret != MV_OK) {
2078                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2079                        ("adll_calibration failure\n"));
2080                if (debug_mode == 0)
2081                        return MV_FAIL;
2082        }
2083
2084        if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
2085                training_stage = SET_LOW_FREQ;
2086
2087                for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2088                        ddr3_tip_adll_regs_bypass(dev_num, 0, 0x1f);
2089                        adll_bypass_flag = 1;
2090                }
2091                effective_cs = 0;
2092
2093                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2094                                  ("SET_LOW_FREQ_MASK_BIT %d\n",
2095                                   freq_tbl[low_freq]));
2096                ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
2097                                        PARAM_NOT_CARE, low_freq);
2098                if (is_reg_dump != 0)
2099                        ddr3_tip_reg_dump(dev_num);
2100                if (ret != MV_OK) {
2101                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2102                                          ("ddr3_tip_freq_set failure\n"));
2103                        if (debug_mode == 0)
2104                                return MV_FAIL;
2105                }
2106        }
2107
2108        if (mask_tune_func & WRITE_LEVELING_LF_MASK_BIT) {
2109                training_stage = WRITE_LEVELING_LF;
2110                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2111                        ("WRITE_LEVELING_LF_MASK_BIT\n"));
2112                ret = ddr3_tip_dynamic_write_leveling(dev_num, 1);
2113                if (is_reg_dump != 0)
2114                        ddr3_tip_reg_dump(dev_num);
2115                if (ret != MV_OK) {
2116                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2117                                ("ddr3_tip_dynamic_write_leveling LF failure\n"));
2118                        if (debug_mode == 0)
2119                                return MV_FAIL;
2120                }
2121        }
2122
2123        for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2124                if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
2125                        training_stage = LOAD_PATTERN;
2126                        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2127                                          ("LOAD_PATTERN_MASK_BIT #%d\n",
2128                                           effective_cs));
2129                        ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
2130                        if (is_reg_dump != 0)
2131                                ddr3_tip_reg_dump(dev_num);
2132                        if (ret != MV_OK) {
2133                                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2134                                                  ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
2135                                                   effective_cs));
2136                                if (debug_mode == 0)
2137                                        return MV_FAIL;
2138                        }
2139                }
2140        }
2141
2142        if (adll_bypass_flag == 1) {
2143                for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2144                        ddr3_tip_adll_regs_bypass(dev_num, phy_reg1_val, 0);
2145                        adll_bypass_flag = 0;
2146                }
2147        }
2148
2149        /* Set to 0 after each loop to avoid an illegal value being used */
2150        effective_cs = 0;
2151
2152        if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
2153                training_stage = SET_MEDIUM_FREQ;
2154                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2155                                  ("SET_MEDIUM_FREQ_MASK_BIT %d\n",
2156                                   freq_tbl[medium_freq]));
2157                ret =
2158                        ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
2159                                          PARAM_NOT_CARE, medium_freq);
2160                if (is_reg_dump != 0)
2161                        ddr3_tip_reg_dump(dev_num);
2162                if (ret != MV_OK) {
2163                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2164                                          ("ddr3_tip_freq_set failure\n"));
2165                        if (debug_mode == 0)
2166                                return MV_FAIL;
2167                }
2168        }
2169
2170        if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
2171                training_stage = WRITE_LEVELING;
2172                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2173                                  ("WRITE_LEVELING_MASK_BIT\n"));
2174                if ((rl_mid_freq_wa == 0) || (freq_tbl[medium_freq] == 533)) {
2175                        ret = ddr3_tip_dynamic_write_leveling(dev_num, 0);
2176                } else {
2177                        /* Use old WL */
2178                        ret = ddr3_tip_legacy_dynamic_write_leveling(dev_num);
2179                }
2180
2181                if (is_reg_dump != 0)
2182                        ddr3_tip_reg_dump(dev_num);
2183                if (ret != MV_OK) {
2184                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2185                                          ("ddr3_tip_dynamic_write_leveling failure\n"));
2186                        if (debug_mode == 0)
2187                                return MV_FAIL;
2188                }
2189        }
2190
2191        for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2192                if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
2193                        training_stage = LOAD_PATTERN_2;
2194                        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2195                                          ("LOAD_PATTERN_2_MASK_BIT CS #%d\n",
2196                                           effective_cs));
2197                        ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
2198                        if (is_reg_dump != 0)
2199                                ddr3_tip_reg_dump(dev_num);
2200                        if (ret != MV_OK) {
2201                                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2202                                                  ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
2203                                                   effective_cs));
2204                                if (debug_mode == 0)
2205                                        return MV_FAIL;
2206                        }
2207                }
2208        }
2209        /* Set to 0 after each loop to avoid an illegal value being used */
2210        effective_cs = 0;
2211
2212        if (mask_tune_func & READ_LEVELING_MASK_BIT) {
2213                training_stage = READ_LEVELING;
2214                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2215                                  ("READ_LEVELING_MASK_BIT\n"));
2216                if ((rl_mid_freq_wa == 0) || (freq_tbl[medium_freq] == 533)) {
2217                        ret = ddr3_tip_dynamic_read_leveling(dev_num, medium_freq);
2218                } else {
2219                        /* Use old RL */
2220                        ret = ddr3_tip_legacy_dynamic_read_leveling(dev_num);
2221                }
2222
2223                if (is_reg_dump != 0)
2224                        ddr3_tip_reg_dump(dev_num);
2225                if (ret != MV_OK) {
2226                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2227                                          ("ddr3_tip_dynamic_read_leveling failure\n"));
2228                        if (debug_mode == 0)
2229                                return MV_FAIL;
2230                }
2231        }
2232
2233        if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
2234                training_stage = WRITE_LEVELING_SUPP;
2235                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2236                                  ("WRITE_LEVELING_SUPP_MASK_BIT\n"));
2237                ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
2238                if (is_reg_dump != 0)
2239                        ddr3_tip_reg_dump(dev_num);
2240                if (ret != MV_OK) {
2241                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2242                                          ("ddr3_tip_dynamic_write_leveling_supp failure\n"));
2243                        if (debug_mode == 0)
2244                                return MV_FAIL;
2245                }
2246        }
2247
2248        for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2249                if (mask_tune_func & PBS_RX_MASK_BIT) {
2250                        training_stage = PBS_RX;
2251                        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2252                                          ("PBS_RX_MASK_BIT CS #%d\n",
2253                                           effective_cs));
2254                        ret = ddr3_tip_pbs_rx(dev_num);
2255                        if (is_reg_dump != 0)
2256                                ddr3_tip_reg_dump(dev_num);
2257                        if (ret != MV_OK) {
2258                                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2259                                                  ("ddr3_tip_pbs_rx failure CS #%d\n",
2260                                                   effective_cs));
2261                                if (debug_mode == 0)
2262                                        return MV_FAIL;
2263                        }
2264                }
2265        }
2266
2267        for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2268                if (mask_tune_func & PBS_TX_MASK_BIT) {
2269                        training_stage = PBS_TX;
2270                        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2271                                          ("PBS_TX_MASK_BIT CS #%d\n",
2272                                           effective_cs));
2273                        ret = ddr3_tip_pbs_tx(dev_num);
2274                        if (is_reg_dump != 0)
2275                                ddr3_tip_reg_dump(dev_num);
2276                        if (ret != MV_OK) {
2277                                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2278                                                  ("ddr3_tip_pbs_tx failure CS #%d\n",
2279                                                   effective_cs));
2280                                if (debug_mode == 0)
2281                                        return MV_FAIL;
2282                        }
2283                }
2284        }
2285        /* Set to 0 after each loop to avoid an illegal value being used */
2286        effective_cs = 0;
2287
2288        if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
2289                training_stage = SET_TARGET_FREQ;
2290                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2291                                  ("SET_TARGET_FREQ_MASK_BIT %d\n",
2292                                   freq_tbl[tm->
2293                                            interface_params[first_active_if].
2294                                            memory_freq]));
2295                ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
2296                                        PARAM_NOT_CARE,
2297                                        tm->interface_params[first_active_if].
2298                                        memory_freq);
2299                if (is_reg_dump != 0)
2300                        ddr3_tip_reg_dump(dev_num);
2301                if (ret != MV_OK) {
2302                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2303                                          ("ddr3_tip_freq_set failure\n"));
2304                        if (debug_mode == 0)
2305                                return MV_FAIL;
2306                }
2307        }
2308
2309        if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
2310                training_stage = WRITE_LEVELING_TF;
2311                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2312                                  ("WRITE_LEVELING_TF_MASK_BIT\n"));
2313                ret = ddr3_tip_dynamic_write_leveling(dev_num, 0);
2314                if (is_reg_dump != 0)
2315                        ddr3_tip_reg_dump(dev_num);
2316                if (ret != MV_OK) {
2317                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2318                                          ("ddr3_tip_dynamic_write_leveling TF failure\n"));
2319                        if (debug_mode == 0)
2320                                return MV_FAIL;
2321                }
2322        }
2323
2324        if (mask_tune_func & LOAD_PATTERN_HIGH_MASK_BIT) {
2325                training_stage = LOAD_PATTERN_HIGH;
2326                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("LOAD_PATTERN_HIGH\n"));
2327                ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
2328                if (is_reg_dump != 0)
2329                        ddr3_tip_reg_dump(dev_num);
2330                if (ret != MV_OK) {
2331                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2332                                          ("ddr3_tip_load_all_pattern_to_mem failure\n"));
2333                        if (debug_mode == 0)
2334                                return MV_FAIL;
2335                }
2336        }
2337
2338        if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
2339                training_stage = READ_LEVELING_TF;
2340                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2341                                  ("READ_LEVELING_TF_MASK_BIT\n"));
2342                ret = ddr3_tip_dynamic_read_leveling(dev_num, tm->
2343                                                     interface_params[first_active_if].
2344                                                     memory_freq);
2345                if (is_reg_dump != 0)
2346                        ddr3_tip_reg_dump(dev_num);
2347                if (ret != MV_OK) {
2348                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2349                                          ("ddr3_tip_dynamic_read_leveling TF failure\n"));
2350                        if (debug_mode == 0)
2351                                return MV_FAIL;
2352                }
2353        }
2354
2355        if (mask_tune_func & RL_DQS_BURST_MASK_BIT) {
2356                training_stage = READ_LEVELING_TF;
2357                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2358                                  ("RL_DQS_BURST_MASK_BIT\n"));
2359                ret = mv_ddr_rl_dqs_burst(0, 0, tm->interface_params[0].memory_freq);
2360                if (is_reg_dump != 0)
2361                        ddr3_tip_reg_dump(dev_num);
2362                if (ret != MV_OK) {
2363                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2364                                          ("mv_ddr_rl_dqs_burst TF failure\n"));
2365                        if (debug_mode == 0)
2366                                return MV_FAIL;
2367                }
2368        }
2369
2370        if (mask_tune_func & DM_PBS_TX_MASK_BIT) {
2371                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DM_PBS_TX_MASK_BIT\n"));
2372        }
2373
2374        for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2375                if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
2376                        training_stage = VREF_CALIBRATION;
2377                        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("VREF\n"));
2378                        ret = ddr3_tip_vref(dev_num);
2379                        if (is_reg_dump != 0) {
2380                                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2381                                                  ("VREF Dump\n"));
2382                                ddr3_tip_reg_dump(dev_num);
2383                        }
2384                        if (ret != MV_OK) {
2385                                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2386                                                  ("ddr3_tip_vref failure\n"));
2387                                if (debug_mode == 0)
2388                                        return MV_FAIL;
2389                        }
2390                }
2391        }
2392        /* Set to 0 after each loop to avoid an illegal value being used */
2393        effective_cs = 0;
2394
2395        for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2396                if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
2397                        training_stage = CENTRALIZATION_RX;
2398                        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2399                                          ("CENTRALIZATION_RX_MASK_BIT CS #%d\n",
2400                                           effective_cs));
2401                        ret = ddr3_tip_centralization_rx(dev_num);
2402                        if (is_reg_dump != 0)
2403                                ddr3_tip_reg_dump(dev_num);
2404                        if (ret != MV_OK) {
2405                                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2406                                                  ("ddr3_tip_centralization_rx failure CS #%d\n",
2407                                                   effective_cs));
2408                                if (debug_mode == 0)
2409                                        return MV_FAIL;
2410                        }
2411                }
2412        }
2413        /* Set to 0 after each loop to avoid an illegal value being used */
2414        effective_cs = 0;
2415
2416        for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2417                if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
2418                        training_stage = WRITE_LEVELING_SUPP_TF;
2419                        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2420                                          ("WRITE_LEVELING_SUPP_TF_MASK_BIT CS #%d\n",
2421                                           effective_cs));
2422                        ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
2423                        if (is_reg_dump != 0)
2424                                ddr3_tip_reg_dump(dev_num);
2425                        if (ret != MV_OK) {
2426                                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2427                                                  ("ddr3_tip_dynamic_write_leveling_supp TF failure CS #%d\n",
2428                                                   effective_cs));
2429                                if (debug_mode == 0)
2430                                        return MV_FAIL;
2431                        }
2432                }
2433        }
2434        /* Set to 0 after each loop to avoid an illegal value being used */
2435        effective_cs = 0;
2436
2438        for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2439                if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
2440                        training_stage = CENTRALIZATION_TX;
2441                        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2442                                          ("CENTRALIZATION_TX_MASK_BIT CS #%d\n",
2443                                           effective_cs));
2444                        ret = ddr3_tip_centralization_tx(dev_num);
2445                        if (is_reg_dump != 0)
2446                                ddr3_tip_reg_dump(dev_num);
2447                        if (ret != MV_OK) {
2448                                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2449                                                  ("ddr3_tip_centralization_tx failure CS #%d\n",
2450                                                   effective_cs));
2451                                if (debug_mode == 0)
2452                                        return MV_FAIL;
2453                        }
2454                }
2455        }
2456        /* Set to 0 after each loop to avoid an illegal value being used */
2457        effective_cs = 0;
2458
2459        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("restore registers to default\n"));
2460        /* restore register values */
2461        CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
2462
2463        if (is_reg_dump != 0)
2464                ddr3_tip_reg_dump(dev_num);
2465
2466        return MV_OK;
2467}
2468
2469/*
2470 * DDR3 Dynamic training flow
2471 */
2472static int ddr3_tip_ddr3_auto_tune(u32 dev_num)
2473{
2474        int status;
2475        u32 if_id, stage;
2476        int is_if_fail = 0, is_auto_tune_fail = 0;
2477
2478        training_stage = INIT_CONTROLLER;
2479
2480        for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
2481                for (stage = 0; stage < MAX_STAGE_LIMIT; stage++)
2482                        training_result[stage][if_id] = NO_TEST_DONE;
2483        }
2484
2485        status = ddr3_tip_ddr3_training_main_flow(dev_num);
2486
2487        /* activate XSB test */
2488        if (xsb_validate_type != 0) {
2489                run_xsb_test(dev_num, xsb_validation_base_address, 1, 1,
2490                             0x1024);
2491        }
2492
2493        if (is_reg_dump != 0)
2494                ddr3_tip_reg_dump(dev_num);
2495
2496        /* print log */
2497        CHECK_STATUS(ddr3_tip_print_log(dev_num, window_mem_addr));
2498
2499#ifndef EXCLUDE_DEBUG_PRINTS
2500        if (status != MV_OK) {
2501                CHECK_STATUS(ddr3_tip_print_stability_log(dev_num));
2502        }
2503#endif /* EXCLUDE_DEBUG_PRINTS */
2504
2505        for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
2506                is_if_fail = 0;
2507                for (stage = 0; stage < MAX_STAGE_LIMIT; stage++) {
2508                        if (training_result[stage][if_id] == TEST_FAILED)
2509                                is_if_fail = 1;
2510                }
2511                if (is_if_fail == 1) {
2512                        is_auto_tune_fail = 1;
2513                        DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2514                                          ("Auto Tune failed for IF %d\n",
2515                                           if_id));
2516                }
2517        }
2518
2519        if (((status == MV_FAIL) && (is_auto_tune_fail == 0)) ||
2520            ((status == MV_OK) && (is_auto_tune_fail == 1))) {
2521                /*
2522                 * If the main flow result and the training_result DB are
2523                 * out of sync, issue a warning (this happens when the
2524                 * training_result DB was not updated on a failure)
2525                 */
2526                DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2527                                  ("Warning: Algorithm return value and result DB "
2528                                   "are not synced (status 0x%x, result DB %d)\n",
2529                                   status, is_auto_tune_fail));
2530        }
2531
2532        if ((status != MV_OK) || (is_auto_tune_fail == 1))
2533                return MV_FAIL;
2534        else
2535                return MV_OK;
2536}
2537
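/*
 * Illustrative sketch (not in the original source): the pass/fail scan in
 * ddr3_tip_ddr3_auto_tune() could be expressed as a small predicate over
 * the training_result DB, assuming the same globals used above:
 *
 *	static int is_if_failed(u32 if_id)
 *	{
 *		u32 stage;
 *
 *		for (stage = 0; stage < MAX_STAGE_LIMIT; stage++)
 *			if (training_result[stage][if_id] == TEST_FAILED)
 *				return 1;
 *		return 0;
 *	}
 */
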
2538/*
2539 * Enable init sequence
2540 */
2541int ddr3_tip_enable_init_sequence(u32 dev_num)
2542{
2543        int is_fail = 0;
2544        u32 if_id = 0, mem_mask = 0, bus_index = 0;
2545        u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
2546        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2547
2548        /* Enable init sequence */
2549        CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 0,
2550                                       SDRAM_INIT_CTRL_REG, 0x1, 0x1));
2551
2552        for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
2553                VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
2554
2555                if (ddr3_tip_if_polling
2556                    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1,
2557                     SDRAM_INIT_CTRL_REG,
2558                     MAX_POLLING_ITERATIONS) != MV_OK) {
2559                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2560                                          ("polling failed IF %d\n",
2561                                           if_id));
2562                        is_fail = 1;
2563                        continue;
2564                }
2565
2566                mem_mask = 0;
2567                for (bus_index = 0; bus_index < octets_per_if_num;
2568                     bus_index++) {
2569                        VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_index);
2570                        mem_mask |=
2571                                tm->interface_params[if_id].
2572                                as_bus_params[bus_index].mirror_enable_bitmask;
2573                }
2574
2575                if (mem_mask != 0) {
2576                        /* Disable Multi CS */
2577                        CHECK_STATUS(ddr3_tip_if_write
2578                                     (dev_num, ACCESS_TYPE_MULTICAST,
2579                                      if_id, DUAL_DUNIT_CFG_REG, 1 << 3,
2580                                      1 << 3));
2581                }
2582        }
2583
2584        return (is_fail == 0) ? MV_OK : MV_FAIL;
2585}
2586
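/*
 * Note on the polling in ddr3_tip_enable_init_sequence() above (an
 * illustrative reading, not original source text): bit 0 of
 * SDRAM_INIT_CTRL_REG is set to kick off the init sequence and is assumed
 * to self-clear once the sequence completes, so the polling call waits
 * for (reg & 0x1) == 0:
 *
 *	ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id,
 *			    0, 0x1, SDRAM_INIT_CTRL_REG,
 *			    MAX_POLLING_ITERATIONS);
 */
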
2587int ddr3_tip_register_dq_table(u32 dev_num, u32 *table)
2588{
2589        dq_map_table = table;
2590
2591        return MV_OK;
2592}
2593
2594/*
2595 * Check if pup search is locked
2596 */
2597int ddr3_tip_is_pup_lock(u32 *pup_buf, enum hws_training_result read_mode)
2598{
2599        u32 bit_start = 0, bit_end = 0, bit_id;
2600
2601        if (read_mode == RESULT_PER_BIT) {
2602                bit_start = 0;
2603                bit_end = BUS_WIDTH_IN_BITS - 1;
2604        } else {
2605                bit_start = 0;
2606                bit_end = 0;
2607        }
2608
2609        for (bit_id = bit_start; bit_id <= bit_end; bit_id++) {
2610                if (GET_LOCK_RESULT(pup_buf[bit_id]) == 0)
2611                        return 0;
2612        }
2613
2614        return 1;
2615}
2616
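/*
 * Illustrative usage sketch (assumption, not in the original source): a
 * caller holding per-bit search results for one pup could check for a
 * full lock like this:
 *
 *	u32 res[BUS_WIDTH_IN_BITS];
 *
 *	(fill res[] from the training IP result registers)
 *	if (ddr3_tip_is_pup_lock(res, RESULT_PER_BIT))
 *		(all BUS_WIDTH_IN_BITS bits reported a lock)
 */
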
2617/*
2618 * Get minimum buffer value
2619 */
2620u8 ddr3_tip_get_buf_min(u8 *buf_ptr)
2621{
2622        u8 min_val = 0xff;
2623        u8 cnt = 0;
2624
2625        for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
2626                if (buf_ptr[cnt] < min_val)
2627                        min_val = buf_ptr[cnt];
2628        }
2629
2630        return min_val;
2631}
2632
2633/*
2634 * Get maximum buffer value
2635 */
2636u8 ddr3_tip_get_buf_max(u8 *buf_ptr)
2637{
2638        u8 max_val = 0;
2639        u8 cnt = 0;
2640
2641        for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
2642                if (buf_ptr[cnt] > max_val)
2643                        max_val = buf_ptr[cnt];
2644        }
2645
2646        return max_val;
2647}
2648
2649/*
2650 * The following functions return memory parameters:
2651 * bus and device width, device size
2652 */
2653
2654u32 hws_ddr3_get_bus_width(void)
2655{
2656        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2657
2658        return (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) ==
2659                1) ? 16 : 32;
2660}
2661
2662u32 hws_ddr3_get_device_width(u32 if_id)
2663{
2664        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2665
2666        return (tm->interface_params[if_id].bus_width ==
2667                MV_DDR_DEV_WIDTH_8BIT) ? 8 : 16;
2668}
2669
2670u32 hws_ddr3_get_device_size(u32 if_id)
2671{
2672        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2673
2674        if (tm->interface_params[if_id].memory_size >=
2675            MV_DDR_DIE_CAP_LAST) {
2676                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2677                                  ("Error: Wrong device size of CS: %d\n",
2678                                   tm->interface_params[if_id].memory_size));
2679                return 0;
2680        } else {
2681                return 1 << tm->interface_params[if_id].memory_size;
2682        }
2683}
2684
2685int hws_ddr3_calc_mem_cs_size(u32 if_id, u32 cs, u32 *cs_size)
2686{
2687        u32 cs_mem_size, dev_size;
2688
2689        dev_size = hws_ddr3_get_device_size(if_id);
2690        if (dev_size != 0) {
2691                cs_mem_size = ((hws_ddr3_get_bus_width() /
2692                                hws_ddr3_get_device_width(if_id)) * dev_size);
2693
2694                /* the result is in 1/16-GByte (64 MByte) units to avoid floating point */
2695
2696                if (cs_mem_size == 2) {
2697                        *cs_size = _128M;
2698                } else if (cs_mem_size == 4) {
2699                        *cs_size = _256M;
2700                } else if (cs_mem_size == 8) {
2701                        *cs_size = _512M;
2702                } else if (cs_mem_size == 16) {
2703                        *cs_size = _1G;
2704                } else if (cs_mem_size == 32) {
2705                        *cs_size = _2G;
2706                } else {
2707                        DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2708                                          ("Error: Wrong memory size of CS: %d\n", cs));
2709                        return MV_FAIL;
2710                }
2711                return MV_OK;
2712        } else {
2713                return MV_FAIL;
2714        }
2715}
2716
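/*
 * Illustrative sketch (not in the original source): cs_mem_size above is
 * held in 1/16-GByte (64 MByte) units, which is why 2 maps to _128M and
 * 16 maps to _1G. A hypothetical conversion to bytes would be:
 *
 *	static inline unsigned long long cs_units_to_bytes(u32 units)
 *	{
 *		return (unsigned long long)units << 26;
 *	}
 *
 * e.g. cs_units_to_bytes(16) = 0x40000000 (1 GByte).
 */
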
2717int hws_ddr3_cs_base_adr_calc(u32 if_id, u32 cs, u32 *cs_base_addr)
2718{
2719        u32 cs_mem_size = 0;
2720#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
2721        u32 physical_mem_size;
2722        u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
2723#endif
2724
2725        if (hws_ddr3_calc_mem_cs_size(if_id, cs, &cs_mem_size) != MV_OK)
2726                return MV_FAIL;
2727
2728#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
2729        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2730        /*
2731         * If the number of address pins doesn't allow using the maximum
2732         * memory size defined in the topology, the memory size is capped
2733         * by DEVICE_MAX_DRAM_ADDRESS_SIZE
2734         */
2735        physical_mem_size = mem_size[tm->interface_params[0].memory_size];
2736
2737        if (hws_ddr3_get_device_width(cs) == 16) {
2738                /*
2739                 * A 16-bit memory device can hold twice the capacity for
2740                 * the same pin count - the least significant address pin
2741                 * is not needed
2742                 */
2742                max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
2743        }
2744
2745        if (physical_mem_size > max_mem_size) {
2746                cs_mem_size = max_mem_size *
2747                        (hws_ddr3_get_bus_width() /
2748                         hws_ddr3_get_device_width(if_id));
2749                DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2750                                  ("Physical memory size 0x%x exceeds address limit, capping at 0x%x\n",
2751                                   physical_mem_size,
2752                                   max_mem_size));
2753        }
2754#endif
2755
2756        /* calculate CS base addr */
2757        *cs_base_addr = ((cs_mem_size) * cs) & 0xffff0000;
2758
2759        return MV_OK;
2760}
2761
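/*
 * Worked example (illustrative, derived from the code above): with two
 * chip-selects of _512M (0x20000000) each, hws_ddr3_cs_base_adr_calc()
 * returns
 *
 *	cs 0: 0x00000000
 *	cs 1: 0x20000000
 *
 * since (cs_mem_size * cs) & 0xffff0000 keeps the base 64 KByte aligned.
 */
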
2762/* TODO: consider moving this code to the MISL PHY driver */
2763enum {
2764        MISL_PHY_DRV_OHM_30 = 0xf,
2765        MISL_PHY_DRV_OHM_48 = 0xa,
2766        MISL_PHY_DRV_OHM_80 = 0x6,
2767        MISL_PHY_DRV_OHM_120 = 0x4
2768};
2769
2770enum {
2771        MISL_PHY_ODT_OHM_60 = 0x8,
2772        MISL_PHY_ODT_OHM_80 = 0x6,
2773        MISL_PHY_ODT_OHM_120 = 0x4,
2774        MISL_PHY_ODT_OHM_240 = 0x2
2775};
2776
2777static unsigned int mv_ddr_misl_phy_drv_calc(unsigned int cfg)
2778{
2779        unsigned int val;
2780
2781        switch (cfg) {
2782        case MV_DDR_OHM_30:
2783                val = MISL_PHY_DRV_OHM_30;
2784                break;
2785        case MV_DDR_OHM_48:
2786                val = MISL_PHY_DRV_OHM_48;
2787                break;
2788        case MV_DDR_OHM_80:
2789                val = MISL_PHY_DRV_OHM_80;
2790                break;
2791        case MV_DDR_OHM_120:
2792                val = MISL_PHY_DRV_OHM_120;
2793                break;
2794        default:
2795                val = PARAM_UNDEFINED;
2796        }
2797
2798        return val;
2799}
2800
2801static unsigned int mv_ddr_misl_phy_odt_calc(unsigned int cfg)
2802{
2803        unsigned int val;
2804
2805        switch (cfg) {
2806        case MV_DDR_OHM_60:
2807                val = MISL_PHY_ODT_OHM_60;
2808                break;
2809        case MV_DDR_OHM_80:
2810                val = MISL_PHY_ODT_OHM_80;
2811                break;
2812        case MV_DDR_OHM_120:
2813                val = MISL_PHY_ODT_OHM_120;
2814                break;
2815        case MV_DDR_OHM_240:
2816                val = MISL_PHY_ODT_OHM_240;
2817                break;
2818        default:
2819                val = PARAM_UNDEFINED;
2820        }
2821
2822        return val;
2823}
2824
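/*
 * Illustrative mapping (derived from the tables above, not additional
 * source code): the calc helpers translate nominal ohm values from the
 * topology's electrical data into MISL PHY register codes, e.g.
 *
 *	mv_ddr_misl_phy_drv_calc(MV_DDR_OHM_48)  = MISL_PHY_DRV_OHM_48  (0xa)
 *	mv_ddr_misl_phy_odt_calc(MV_DDR_OHM_120) = MISL_PHY_ODT_OHM_120 (0x4)
 *
 * Any unsupported value falls through to PARAM_UNDEFINED, which the
 * *_get() wrappers below report as an error.
 */
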
2825unsigned int mv_ddr_misl_phy_drv_data_p_get(void)
2826{
2827        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2828        unsigned int drv_data_p = mv_ddr_misl_phy_drv_calc(tm->edata.phy_edata.drv_data_p);
2829
2830        if (drv_data_p == PARAM_UNDEFINED)
2831                printf("error: %s: unsupported drv_data_p parameter found\n", __func__);
2832
2833        return drv_data_p;
2834}
2835
2836unsigned int mv_ddr_misl_phy_drv_data_n_get(void)
2837{
2838        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2839        unsigned int drv_data_n = mv_ddr_misl_phy_drv_calc(tm->edata.phy_edata.drv_data_n);
2840
2841        if (drv_data_n == PARAM_UNDEFINED)
2842                printf("error: %s: unsupported drv_data_n parameter found\n", __func__);
2843
2844        return drv_data_n;
2845}
2846
2847unsigned int mv_ddr_misl_phy_drv_ctrl_p_get(void)
2848{
2849        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2850        unsigned int drv_ctrl_p = mv_ddr_misl_phy_drv_calc(tm->edata.phy_edata.drv_ctrl_p);
2851
2852        if (drv_ctrl_p == PARAM_UNDEFINED)
2853                printf("error: %s: unsupported drv_ctrl_p parameter found\n", __func__);
2854
2855        return drv_ctrl_p;
2856}
2857
2858unsigned int mv_ddr_misl_phy_drv_ctrl_n_get(void)
2859{
2860        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2861        unsigned int drv_ctrl_n = mv_ddr_misl_phy_drv_calc(tm->edata.phy_edata.drv_ctrl_n);
2862
2863        if (drv_ctrl_n == PARAM_UNDEFINED)
2864                printf("error: %s: unsupported drv_ctrl_n parameter found\n", __func__);
2865
2866        return drv_ctrl_n;
2867}
2868
2869unsigned int mv_ddr_misl_phy_odt_p_get(void)
2870{
2871        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2872        unsigned int cs_num = mv_ddr_cs_num_get();
2873        unsigned int odt_p = PARAM_UNDEFINED;
2874
2875        if (cs_num > 0 && cs_num <= MAX_CS_NUM)
2876                odt_p = mv_ddr_misl_phy_odt_calc(tm->edata.phy_edata.odt_p[cs_num - 1]);
2877
2878        if (odt_p == PARAM_UNDEFINED)
2879                printf("error: %s: unsupported odt_p parameter found\n", __func__);
2880
2881        return odt_p;
2882}
2883
2884unsigned int mv_ddr_misl_phy_odt_n_get(void)
2885{
2886        struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
2887        unsigned int cs_num = mv_ddr_cs_num_get();
2888        unsigned int odt_n = PARAM_UNDEFINED;
2889
2890        if (cs_num > 0 && cs_num <= MAX_CS_NUM)
2891                odt_n = mv_ddr_misl_phy_odt_calc(tm->edata.phy_edata.odt_n[cs_num - 1]);
2892
2893        if (odt_n == PARAM_UNDEFINED)
2894                printf("error: %s: unsupported odt_n parameter found\n", __func__);
2895
2896        return odt_n;
2897}
2898
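/*
 * Illustrative consumer sketch (assumption, not in the original source):
 * code programming the MISL PHY might resolve all values up front and
 * treat PARAM_UNDEFINED as fatal:
 *
 *	unsigned int drv_p = mv_ddr_misl_phy_drv_data_p_get();
 *	unsigned int drv_n = mv_ddr_misl_phy_drv_data_n_get();
 *	unsigned int odt_p = mv_ddr_misl_phy_odt_p_get();
 *	unsigned int odt_n = mv_ddr_misl_phy_odt_n_get();
 *
 *	if (drv_p == PARAM_UNDEFINED || drv_n == PARAM_UNDEFINED ||
 *	    odt_p == PARAM_UNDEFINED || odt_n == PARAM_UNDEFINED)
 *		return MV_FAIL;
 */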