#include "ddr3_init.h"
#include "mv_ddr_common.h"
#include "mv_ddr_training_db.h"
#include "mv_ddr_regs.h"
#include "mv_ddr_sys_env_lib.h"

#define DDR_INTERFACES_NUM		1
#define DDR_INTERFACE_OCTETS_NUM	5
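
/*
 * L2 address filtering: the register below holds the end of the DRAM
 * address range; it is programmed with the total memory size once it is
 * known (see ddr3_fast_path_dynamic_cs_size_config()).
 */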
#define L2_FILTER_FOR_MAX_MEMORY_SIZE	0xC0000000
#define ADDRESS_FILTERING_END_REGISTER	0x8c04

#define DYNAMIC_CS_SIZE_CONFIG
#define DISABLE_L2_FILTERING_DURING_DDR_TRAINING
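
/* thermal sensor registers */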
#define TSEN_CONTROL_LSB_REG		0xE4070
#define TSEN_CONTROL_LSB_TC_TRIM_OFFSET	0
#define TSEN_CONTROL_LSB_TC_TRIM_MASK	(0x7 << TSEN_CONTROL_LSB_TC_TRIM_OFFSET)
#define TSEN_CONTROL_MSB_REG		0xE4074
#define TSEN_CONTROL_MSB_RST_OFFSET	8
#define TSEN_CONTROL_MSB_RST_MASK	(0x1 << TSEN_CONTROL_MSB_RST_OFFSET)
#define TSEN_STATUS_REG			0xe4078
#define TSEN_STATUS_READOUT_VALID_OFFSET	10
#define TSEN_STATUS_READOUT_VALID_MASK	(0x1 << \
					 TSEN_STATUS_READOUT_VALID_OFFSET)
#define TSEN_STATUS_TEMP_OUT_OFFSET	0
#define TSEN_STATUS_TEMP_OUT_MASK	(0x3ff << TSEN_STATUS_TEMP_OUT_OFFSET)

static struct dlb_config ddr3_dlb_config_table[] = {
	{DLB_CTRL_REG, 0x2000005c},
	{DLB_BUS_OPT_WT_REG, 0x00880000},
	{DLB_AGING_REG, 0x0f7f007f},
	{DLB_EVICTION_CTRL_REG, 0x0000129f},
	{DLB_EVICTION_TIMERS_REG, 0x00ff0000},
	{DLB_WTS_DIFF_CS_REG, 0x04030802},
	{DLB_WTS_DIFF_BG_REG, 0x00000a02},
	{DLB_WTS_SAME_BG_REG, 0x09000a01},
	{DLB_WTS_CMDS_REG, 0x00020005},
	{DLB_WTS_ATTR_PRIO_REG, 0x00060f10},
	{DLB_QUEUE_MAP_REG, 0x00000543},
	{DLB_SPLIT_REG, 0x00000000},
	{DLB_USER_CMD_REG, 0x00000000},
	{0x0, 0x0}
};

static struct dlb_config *sys_env_dlb_config_ptr_get(void)
{
	return &ddr3_dlb_config_table[0];
}
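
/*
 * Training parameters per DDR frequency, indexed by enum mv_ddr_freq;
 * consumed through ddr3_tip_a38x_get_freq_config() below.
 */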
static u8 a38x_bw_per_freq[MV_DDR_FREQ_LAST] = {
	0x3,
	0x4,
	0x4,
	0x5,
	0x5,
	0x5,
	0x5,
	0x3,
	0x3,
	0x4,
	0x5,
	0x5,
	0x3,
	0x5,
	0x3,
	0x5
};

static u8 a38x_rate_per_freq[MV_DDR_FREQ_LAST] = {
	0x1,
	0x2,
	0x2,
	0x2,
	0x2,
	0x3,
	0x3,
	0x1,
	0x1,
	0x2,
	0x2,
	0x2,
	0x1,
	0x2,
	0x1,
	0x2
};
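
/*
 * PLL VCO frequency in MHz per sample-at-reset clock-select value,
 * for 25 MHz and 40 MHz reference clocks respectively.
 */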
static u16 a38x_vco_freq_per_sar_ref_clk_25_mhz[] = {
	666,
	1332,
	800,
	1600,
	1066,
	2132,
	1200,
	2400,
	1332,
	1332,
	1500,
	1500,
	1600,
	1600,
	1700,
	1700,
	1866,
	1866,
	1800,
	2000,
	2000,
	4000,
	2132,
	2132,
	2300,
	2300,
	2400,
	2400,
	2500,
	2500,
	800
};

static u16 a38x_vco_freq_per_sar_ref_clk_40_mhz[] = {
	666,
	1332,
	800,
	800,
	1066,
	1066,
	1200,
	2400,
	1332,
	1332,
	1500,
	1600,
	1600,
	1600,
	1700,
	1560,
	1866,
	1866,
	1800,
	2000,
	2000,
	4000,
	2132,
	2132,
	2300,
	2300,
	2400,
	2400,
	2500,
	2500,
	1800
};
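
/* DQ bit to PHY pin mapping, eight entries per data octet */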
static u32 dq_bit_map_2_phy_pin[] = {
	1, 0, 2, 6, 9, 8, 3, 7,
	8, 9, 1, 7, 2, 6, 3, 0,
	3, 9, 7, 8, 1, 0, 2, 6,
	1, 0, 6, 2, 8, 3, 7, 9,
	0, 1, 2, 9, 7, 8, 3, 6,
};

void mv_ddr_mem_scrubbing(void)
{
	ddr3_new_tip_ecc_scrub();
}

static int ddr3_tip_a38x_set_divider(u8 dev_num, u32 if_id,
				     enum mv_ddr_freq freq);
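
/*
 * Read the SoC junction temperature from the on-die thermal sensor.
 * Returns degrees Celsius, or 0 if the sensor readout is not valid.
 */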
static u32 ddr3_ctrl_get_junc_temp(u8 dev_num)
{
	int reg = 0;

	/* initiate a TSEN hardware reset once */
	if ((reg_read(TSEN_CONTROL_MSB_REG) & TSEN_CONTROL_MSB_RST_MASK) == 0) {
		reg_bit_set(TSEN_CONTROL_MSB_REG, TSEN_CONTROL_MSB_RST_MASK);
		/* set the thermal coefficient trim field to its default */
		reg = reg_read(TSEN_CONTROL_LSB_REG);
		reg &= ~TSEN_CONTROL_LSB_TC_TRIM_MASK;
		reg |= 0x3 << TSEN_CONTROL_LSB_TC_TRIM_OFFSET;
		reg_write(TSEN_CONTROL_LSB_REG, reg);
	}
	mdelay(10);

	/* check that the readout field is valid */
	if ((reg_read(TSEN_STATUS_REG) & TSEN_STATUS_READOUT_VALID_MASK) == 0) {
		printf("%s: TSEN not ready\n", __func__);
		return 0;
	}

	reg = reg_read(TSEN_STATUS_REG);
	reg = (reg & TSEN_STATUS_TEMP_OUT_MASK) >> TSEN_STATUS_TEMP_OUT_OFFSET;

	return ((((10000 * reg) / 21445) * 1000) - 272674) / 1000;
}
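
/*
 * Return the frequency-dependent training parameters for freq;
 * fails if the frequency is not supported on this SoC.
 */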
static int ddr3_tip_a38x_get_freq_config(u8 dev_num, enum mv_ddr_freq freq,
					 struct hws_tip_freq_config_info
					 *freq_config_info)
{
	if (a38x_bw_per_freq[freq] == 0xff)
		return MV_NOT_SUPPORTED;

	if (freq_config_info == NULL)
		return MV_BAD_PARAM;

	freq_config_info->bw_per_freq = a38x_bw_per_freq[freq];
	freq_config_info->rate_per_freq = a38x_rate_per_freq[freq];
	freq_config_info->is_supported = 1;

	return MV_OK;
}
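
/* masked read and read-modify-write accessors for D-unit registers */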
static void dunit_read(u32 addr, u32 mask, u32 *data)
{
	*data = reg_read(addr) & mask;
}

static void dunit_write(u32 addr, u32 mask, u32 data)
{
	u32 reg_val = data;

	if (mask != MASK_ALL_BITS) {
		dunit_read(addr, MASK_ALL_BITS, &reg_val);
		reg_val &= (~mask);
		reg_val |= (data & mask);
	}

	reg_write(addr, reg_val);
}
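
/* ODPG (training pattern generator) control */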
#define ODPG_ENABLE_REG	0x186d4
#define ODPG_EN_OFFS	0
#define ODPG_EN_MASK	0x1
#define ODPG_EN_ENA	1
#define ODPG_EN_DONE	0
#define ODPG_DIS_OFFS	8
#define ODPG_DIS_MASK	0x1
#define ODPG_DIS_DIS	1
void mv_ddr_odpg_enable(void)
{
	dunit_write(ODPG_ENABLE_REG,
		    ODPG_EN_MASK << ODPG_EN_OFFS,
		    ODPG_EN_ENA << ODPG_EN_OFFS);
}

void mv_ddr_odpg_disable(void)
{
	dunit_write(ODPG_ENABLE_REG,
		    ODPG_DIS_MASK << ODPG_DIS_OFFS,
		    ODPG_DIS_DIS << ODPG_DIS_OFFS);
}

void mv_ddr_odpg_done_clr(void)
{
	return;
}

int mv_ddr_is_odpg_done(u32 count)
{
	u32 i, data;

	for (i = 0; i < count; i++) {
		dunit_read(ODPG_ENABLE_REG, MASK_ALL_BITS, &data);
		if (((data >> ODPG_EN_OFFS) & ODPG_EN_MASK) ==
		    ODPG_EN_DONE)
			break;
	}

	if (i >= count) {
		printf("%s: timeout\n", __func__);
		return MV_FAIL;
	}

	return MV_OK;
}

void mv_ddr_training_enable(void)
{
	dunit_write(GLOB_CTRL_STATUS_REG,
		    TRAINING_TRIGGER_MASK << TRAINING_TRIGGER_OFFS,
		    TRAINING_TRIGGER_ENA << TRAINING_TRIGGER_OFFS);
}

#define DRAM_INIT_CTRL_STATUS_REG	0x18488
#define TRAINING_TRIGGER_OFFS	0
#define TRAINING_TRIGGER_MASK	0x1
#define TRAINING_TRIGGER_ENA	1
#define TRAINING_DONE_OFFS	1
#define TRAINING_DONE_MASK	0x1
#define TRAINING_DONE_DONE	1
#define TRAINING_DONE_NOT_DONE	0
#define TRAINING_RESULT_OFFS	2
#define TRAINING_RESULT_MASK	0x1
#define TRAINING_RESULT_PASS	0
#define TRAINING_RESULT_FAIL	1
int mv_ddr_is_training_done(u32 count, u32 *result)
{
	u32 i, data;

	if (result == NULL) {
		printf("%s: NULL result pointer found\n", __func__);
		return MV_FAIL;
	}

	for (i = 0; i < count; i++) {
		dunit_read(DRAM_INIT_CTRL_STATUS_REG, MASK_ALL_BITS, &data);
		if (((data >> TRAINING_DONE_OFFS) & TRAINING_DONE_MASK) ==
		    TRAINING_DONE_DONE)
			break;
	}

	if (i >= count) {
		printf("%s: timeout\n", __func__);
		return MV_FAIL;
	}

	*result = (data >> TRAINING_RESULT_OFFS) & TRAINING_RESULT_MASK;

	return MV_OK;
}

#define DM_PAD	10
u32 mv_ddr_dm_pad_get(void)
{
	return DM_PAD;
}
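
/*
 * Select which controller drives the DDR interface (bit 6 of
 * DUAL_DUNIT_CFG_REG): presumably routing access between the training
 * (TIP) engine and the D-unit.
 */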
static int ddr3_tip_a38x_select_ddr_controller(u8 dev_num, int enable)
{
	u32 reg;

	reg = reg_read(DUAL_DUNIT_CFG_REG);

	if (enable)
		reg |= (1 << 6);
	else
		reg &= ~(1 << 6);

	reg_write(DUAL_DUNIT_CFG_REG, reg);

	return MV_OK;
}

static u8 ddr3_tip_clock_mode(u32 frequency)
{
	/* run 1:1 mode at and below 400 MHz, 2:1 mode above it */
	if ((frequency == MV_DDR_FREQ_LOW_FREQ) || (mv_ddr_freq_get(frequency) <= 400))
		return 1;

	return 2;
}

static int mv_ddr_sar_freq_get(int dev_num, enum mv_ddr_freq *freq)
{
	u32 reg, ref_clk_satr;

	/* read the DDR frequency strap from the sample-at-reset register */
	reg = (reg_read(REG_DEVICE_SAR1_ADDR) >>
	       RST2_CPU_DDR_CLOCK_SELECT_IN_OFFSET) &
		RST2_CPU_DDR_CLOCK_SELECT_IN_MASK;

	ref_clk_satr = reg_read(DEVICE_SAMPLE_AT_RESET2_REG);
	if (((ref_clk_satr >> DEVICE_SAMPLE_AT_RESET2_REG_REFCLK_OFFSET) & 0x1) ==
	    DEVICE_SAMPLE_AT_RESET2_REG_REFCLK_25MHZ) {
		switch (reg) {
		case 0x1:
			DEBUG_TRAINING_ACCESS(DEBUG_LEVEL_ERROR,
					      ("Warning: Unsupported freq mode for 333Mhz configured(%d)\n",
					       reg));
			/* fallthrough */
		case 0x0:
			*freq = MV_DDR_FREQ_333;
			break;
		case 0x3:
			DEBUG_TRAINING_ACCESS(DEBUG_LEVEL_ERROR,
					      ("Warning: Unsupported freq mode for 400Mhz configured(%d)\n",
					       reg));
			/* fallthrough */
		case 0x2:
			*freq = MV_DDR_FREQ_400;
			break;
		case 0xd:
			DEBUG_TRAINING_ACCESS(DEBUG_LEVEL_ERROR,
					      ("Warning: Unsupported freq mode for 533Mhz configured(%d)\n",
					       reg));
			/* fallthrough */
		case 0x4:
			*freq = MV_DDR_FREQ_533;
			break;
		case 0x6:
			*freq = MV_DDR_FREQ_600;
			break;
		case 0x11:
		case 0x14:
			DEBUG_TRAINING_ACCESS(DEBUG_LEVEL_ERROR,
					      ("Warning: Unsupported freq mode for 667Mhz configured(%d)\n",
					       reg));
			/* fallthrough */
		case 0x8:
			*freq = MV_DDR_FREQ_667;
			break;
		case 0x15:
		case 0x1b:
			DEBUG_TRAINING_ACCESS(DEBUG_LEVEL_ERROR,
					      ("Warning: Unsupported freq mode for 800Mhz configured(%d)\n",
					       reg));
			/* fallthrough */
		case 0xc:
			*freq = MV_DDR_FREQ_800;
			break;
		case 0x10:
			*freq = MV_DDR_FREQ_933;
			break;
		case 0x12:
			*freq = MV_DDR_FREQ_900;
			break;
		case 0x13:
			*freq = MV_DDR_FREQ_933;
			break;
		default:
			*freq = 0;
			return MV_NOT_SUPPORTED;
		}
	} else { /* 40 MHz reference clock */
		switch (reg) {
		case 0x3:
			*freq = MV_DDR_FREQ_400;
			break;
		case 0x5:
			*freq = MV_DDR_FREQ_533;
			break;
		case 0xb:
			*freq = MV_DDR_FREQ_800;
			break;
		case 0x1e:
			*freq = MV_DDR_FREQ_900;
			break;
		default:
			*freq = 0;
			return MV_NOT_SUPPORTED;
		}
	}

	return MV_OK;
}

/* return the intermediate ("medium") frequency used during the training flow */
static int ddr3_tip_a38x_get_medium_freq(int dev_num, enum mv_ddr_freq *freq)
{
	u32 reg, ref_clk_satr;

	/* read the DDR frequency strap from the sample-at-reset register */
	reg = (reg_read(REG_DEVICE_SAR1_ADDR) >>
	       RST2_CPU_DDR_CLOCK_SELECT_IN_OFFSET) &
		RST2_CPU_DDR_CLOCK_SELECT_IN_MASK;

	ref_clk_satr = reg_read(DEVICE_SAMPLE_AT_RESET2_REG);
	if (((ref_clk_satr >> DEVICE_SAMPLE_AT_RESET2_REG_REFCLK_OFFSET) & 0x1) ==
	    DEVICE_SAMPLE_AT_RESET2_REG_REFCLK_25MHZ) {
		switch (reg) {
		case 0x0:
		case 0x1:
			*freq = MV_DDR_FREQ_333;
			break;
		case 0x2:
		case 0x3:
			*freq = MV_DDR_FREQ_400;
			break;
		case 0x4:
		case 0xd:
			*freq = MV_DDR_FREQ_533;
			break;
		case 0x8:
		case 0x10:
		case 0x11:
		case 0x14:
			*freq = MV_DDR_FREQ_333;
			break;
		case 0xc:
		case 0x15:
		case 0x1b:
			*freq = MV_DDR_FREQ_400;
			break;
		case 0x6:
			*freq = MV_DDR_FREQ_300;
			break;
		case 0x12:
			*freq = MV_DDR_FREQ_360;
			break;
		case 0x13:
			*freq = MV_DDR_FREQ_400;
			break;
		default:
			*freq = 0;
			return MV_NOT_SUPPORTED;
		}
	} else { /* 40 MHz reference clock */
		switch (reg) {
		case 0x3:
			*freq = MV_DDR_FREQ_400;
			break;
		case 0x5:
			*freq = MV_DDR_FREQ_533;
			break;
		case 0xb:
			*freq = MV_DDR_FREQ_400;
			break;
		case 0x1e:
			*freq = MV_DDR_FREQ_360;
			break;
		default:
			*freq = 0;
			return MV_NOT_SUPPORTED;
		}
	}

	return MV_OK;
}

static int ddr3_tip_a38x_get_device_info(u8 dev_num, struct ddr3_device_info *info_ptr)
{
	info_ptr->device_id = 0x6800;
	info_ptr->ck_delay = ck_delay;

	return MV_OK;
}
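
/*
 * PHY register file access (PRFA): indirect read/write path to the DDR
 * PHY registers through PHY_REG_FILE_ACCESS_REG.
 */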
static int is_prfa_done(void)
{
	u32 reg_val;
	u32 iter = 0;

	do {
		if (iter++ > MAX_POLLING_ITERATIONS) {
			printf("error: %s: polling timeout\n", __func__);
			return MV_FAIL;
		}
		dunit_read(PHY_REG_FILE_ACCESS_REG, MASK_ALL_BITS, &reg_val);
		reg_val >>= PRFA_REQ_OFFS;
		reg_val &= PRFA_REQ_MASK;
	} while (reg_val == PRFA_REQ_ENA);

	return MV_OK;
}

static int prfa_write(enum hws_access_type phy_access, u32 phy,
		      enum hws_ddr_phy phy_type, u32 addr,
		      u32 data, enum hws_operation op_type)
{
	u32 reg_val = ((data & PRFA_DATA_MASK) << PRFA_DATA_OFFS) |
		      ((addr & PRFA_REG_NUM_MASK) << PRFA_REG_NUM_OFFS) |
		      ((phy & PRFA_PUP_NUM_MASK) << PRFA_PUP_NUM_OFFS) |
		      ((phy_type & PRFA_PUP_CTRL_DATA_MASK) << PRFA_PUP_CTRL_DATA_OFFS) |
		      ((phy_access & PRFA_PUP_BCAST_WR_ENA_MASK) << PRFA_PUP_BCAST_WR_ENA_OFFS) |
		      (((addr >> 6) & PRFA_REG_NUM_HI_MASK) << PRFA_REG_NUM_HI_OFFS) |
		      ((op_type & PRFA_TYPE_MASK) << PRFA_TYPE_OFFS);
	dunit_write(PHY_REG_FILE_ACCESS_REG, MASK_ALL_BITS, reg_val);
	reg_val |= (PRFA_REQ_ENA << PRFA_REQ_OFFS);
	dunit_write(PHY_REG_FILE_ACCESS_REG, MASK_ALL_BITS, reg_val);

	/* poll for prfa request completion */
	if (is_prfa_done() != MV_OK)
		return MV_FAIL;

	return MV_OK;
}

static int prfa_read(enum hws_access_type phy_access, u32 phy,
		     enum hws_ddr_phy phy_type, u32 addr, u32 *data)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	u32 max_phy = ddr3_tip_dev_attr_get(0, MV_ATTR_OCTET_PER_INTERFACE);
	u32 i, reg_val;

	if (phy_access == ACCESS_TYPE_MULTICAST) {
		for (i = 0; i < max_phy; i++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, i);
			if (prfa_write(ACCESS_TYPE_UNICAST, i, phy_type, addr, 0, OPERATION_READ) != MV_OK)
				return MV_FAIL;
			dunit_read(PHY_REG_FILE_ACCESS_REG, MASK_ALL_BITS, &reg_val);
			data[i] = (reg_val >> PRFA_DATA_OFFS) & PRFA_DATA_MASK;
		}
	} else {
		if (prfa_write(phy_access, phy, phy_type, addr, 0, OPERATION_READ) != MV_OK)
			return MV_FAIL;
		dunit_read(PHY_REG_FILE_ACCESS_REG, MASK_ALL_BITS, &reg_val);
		*data = (reg_val >> PRFA_DATA_OFFS) & PRFA_DATA_MASK;
	}

	return MV_OK;
}

static int mv_ddr_sw_db_init(u32 dev_num, u32 board_id)
{
	struct hws_tip_config_func_db config_func;

	/* register the platform callbacks with the training engine */
	config_func.mv_ddr_dunit_read = dunit_read;
	config_func.mv_ddr_dunit_write = dunit_write;
	config_func.tip_dunit_mux_select_func =
		ddr3_tip_a38x_select_ddr_controller;
	config_func.tip_get_freq_config_info_func =
		ddr3_tip_a38x_get_freq_config;
	config_func.tip_set_freq_divider_func = ddr3_tip_a38x_set_divider;
	config_func.tip_get_device_info_func = ddr3_tip_a38x_get_device_info;
	config_func.tip_get_temperature = ddr3_ctrl_get_junc_temp;
	config_func.tip_get_clock_ratio = ddr3_tip_clock_mode;
	config_func.tip_external_read = ddr3_tip_ext_read;
	config_func.tip_external_write = ddr3_tip_ext_write;
	config_func.mv_ddr_phy_read = prfa_read;
	config_func.mv_ddr_phy_write = prfa_write;

	ddr3_tip_init_config_func(dev_num, &config_func);

	ddr3_tip_register_dq_table(dev_num, dq_bit_map_2_phy_pin);

	/* set device attributes */
	ddr3_tip_dev_attr_init(dev_num);
	ddr3_tip_dev_attr_set(dev_num, MV_ATTR_TIP_REV, MV_TIP_REV_4);
	ddr3_tip_dev_attr_set(dev_num, MV_ATTR_PHY_EDGE, MV_DDR_PHY_EDGE_POSITIVE);
	ddr3_tip_dev_attr_set(dev_num, MV_ATTR_OCTET_PER_INTERFACE, DDR_INTERFACE_OCTETS_NUM);
	ddr3_tip_dev_attr_set(dev_num, MV_ATTR_INTERLEAVE_WA, 0);

	ca_delay = 0;
	delay_enable = 1;
	dfs_low_freq = DFS_LOW_FREQ_VALUE;
	calibration_update_control = 1;

	ddr3_tip_a38x_get_medium_freq(dev_num, &medium_freq);

	return MV_OK;
}
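
/*
 * Select the set of training stages to run: the full mask by default,
 * a reduced flow for low target frequencies (333/400 MHz), and no
 * supplementary write-leveling or PBS stages when ECC is enabled.
 */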
static int mv_ddr_training_mask_set(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	enum mv_ddr_freq ddr_freq = tm->interface_params[0].memory_freq;

	mask_tune_func = (SET_LOW_FREQ_MASK_BIT |
			  LOAD_PATTERN_MASK_BIT |
			  SET_MEDIUM_FREQ_MASK_BIT | WRITE_LEVELING_MASK_BIT |
			  WRITE_LEVELING_SUPP_MASK_BIT |
			  READ_LEVELING_MASK_BIT |
			  PBS_RX_MASK_BIT |
			  PBS_TX_MASK_BIT |
			  SET_TARGET_FREQ_MASK_BIT |
			  WRITE_LEVELING_TF_MASK_BIT |
			  WRITE_LEVELING_SUPP_TF_MASK_BIT |
			  READ_LEVELING_TF_MASK_BIT |
			  CENTRALIZATION_RX_MASK_BIT |
			  CENTRALIZATION_TX_MASK_BIT);
	rl_mid_freq_wa = 1;

	if ((ddr_freq == MV_DDR_FREQ_333) || (ddr_freq == MV_DDR_FREQ_400)) {
		mask_tune_func = (WRITE_LEVELING_MASK_BIT |
				  LOAD_PATTERN_2_MASK_BIT |
				  WRITE_LEVELING_SUPP_MASK_BIT |
				  READ_LEVELING_MASK_BIT |
				  PBS_RX_MASK_BIT |
				  PBS_TX_MASK_BIT |
				  CENTRALIZATION_RX_MASK_BIT |
				  CENTRALIZATION_TX_MASK_BIT);
		rl_mid_freq_wa = 0;
	}

	/* supplementary and PBS stages are not supported with ECC */
	if (mv_ddr_is_ecc_ena()) {
		mask_tune_func &= ~WRITE_LEVELING_SUPP_TF_MASK_BIT;
		mask_tune_func &= ~WRITE_LEVELING_SUPP_MASK_BIT;
		mask_tune_func &= ~PBS_TX_MASK_BIT;
		mask_tune_func &= ~PBS_RX_MASK_BIT;
	}

	return MV_OK;
}

/*
 * Set the controller that drives the calibration-update cycle at the
 * end of training to the internal controller.
 */
void mv_ddr_set_calib_controller(void)
{
	calibration_update_control = CAL_UPDATE_CTRL_INT;
}

static int ddr3_tip_a38x_set_divider(u8 dev_num, u32 if_id,
				     enum mv_ddr_freq frequency)
{
	u32 divider = 0;
	u32 sar_val, ref_clk_satr;
	u32 async_val;
	u32 cpu_freq;
	u32 ddr_freq = mv_ddr_freq_get(frequency);

	if (if_id != 0) {
		DEBUG_TRAINING_ACCESS(DEBUG_LEVEL_ERROR,
				      ("A38x does not support interface 0x%x\n",
				       if_id));
		return MV_BAD_PARAM;
	}

	/* get the VCO frequency index from the sample-at-reset registers */
	sar_val = (reg_read(REG_DEVICE_SAR1_ADDR) >>
		   RST2_CPU_DDR_CLOCK_SELECT_IN_OFFSET) &
		RST2_CPU_DDR_CLOCK_SELECT_IN_MASK;

	ref_clk_satr = reg_read(DEVICE_SAMPLE_AT_RESET2_REG);
	if (((ref_clk_satr >> DEVICE_SAMPLE_AT_RESET2_REG_REFCLK_OFFSET) & 0x1) ==
	    DEVICE_SAMPLE_AT_RESET2_REG_REFCLK_25MHZ)
		cpu_freq = a38x_vco_freq_per_sar_ref_clk_25_mhz[sar_val];
	else
		cpu_freq = a38x_vco_freq_per_sar_ref_clk_40_mhz[sar_val];

	divider = cpu_freq / ddr_freq;

	if (((cpu_freq % ddr_freq != 0) || (divider != 2 && divider != 3)) &&
	    (ddr_freq > 400)) {
		/* set async mode */
		dunit_write(0x20220, 0x1000, 0x1000);
		dunit_write(0xe42f4, 0x200, 0x200);

		/* wait for async mode setup */
		mdelay(5);

		/* set the async PLL value per target frequency */
		switch (frequency) {
		case MV_DDR_FREQ_467:
			async_val = 0x806f012;
			break;
		case MV_DDR_FREQ_533:
			async_val = 0x807f012;
			break;
		case MV_DDR_FREQ_600:
			async_val = 0x805f00a;
			break;
		case MV_DDR_FREQ_667:
			async_val = 0x809f012;
			break;
		case MV_DDR_FREQ_800:
			async_val = 0x807f00a;
			break;
		case MV_DDR_FREQ_850:
			async_val = 0x80cb012;
			break;
		case MV_DDR_FREQ_900:
			async_val = 0x80d7012;
			break;
		case MV_DDR_FREQ_933:
			async_val = 0x80df012;
			break;
		case MV_DDR_FREQ_1000:
			async_val = 0x80ef012;
			break;
		case MV_DDR_FREQ_1066:
			async_val = 0x80ff012;
			break;
		default:
			/* use the MV_DDR_FREQ_667 setting as the default */
			async_val = 0x809f012;
		}
		dunit_write(0xe42f0, 0xffffffff, async_val);
	} else {
		/* set sync mode */
		dunit_write(0x20220, 0x1000, 0x0);
		dunit_write(0xe42f4, 0x200, 0x0);

		/* set the clock-divider reset mask */
		dunit_write(0xe4264, 0xff, 0x1f);

		/* set clock-divider reload smooth */
		dunit_write(0xe4260, (0xff << 8), (0x2 << 8));

		/* set clock-divider relax enable */
		dunit_write(0xe4260, (0xff << 24), (0x2 << 24));

		/* write the divider */
		dunit_write(0xe4268, (0x3f << 8), (divider << 8));

		/* set the clock-divider reload ratio */
		dunit_write(0xe4264, (1 << 8), (1 << 8));

		/* clear the clock-divider reload ratio */
		dunit_write(0xe4264, (1 << 8), 0x0);

		/* clear clock-divider reload smooth */
		dunit_write(0xe4260, (0xff << 8), 0x0);

		/* clear clock-divider relax enable */
		dunit_write(0xe4260, (0xff << 24), 0x0);

		/* clear the clock-divider reset mask */
		dunit_write(0xe4264, 0xff, 0x0);
	}

	/* update the D-unit with the 1:1 / 2:1 clock mode */
	dunit_write(0x18488, (1 << 16), ((ddr3_tip_clock_mode(frequency) & 0x1) << 16));
	dunit_write(0x1524, (1 << 15), ((ddr3_tip_clock_mode(frequency) - 1) << 15));

	return MV_OK;
}

/* external read from memory */
int ddr3_tip_ext_read(u32 dev_num, u32 if_id, u32 reg_addr,
		      u32 num_of_bursts, u32 *data)
{
	u32 burst_num;

	for (burst_num = 0; burst_num < num_of_bursts * 8; burst_num++)
		data[burst_num] = readl(reg_addr + 4 * burst_num);

	return MV_OK;
}

/* external write to memory */
int ddr3_tip_ext_write(u32 dev_num, u32 if_id, u32 reg_addr,
		       u32 num_of_bursts, u32 *data)
{
	u32 burst_num;

	for (burst_num = 0; burst_num < num_of_bursts * 8; burst_num++)
		writel(data[burst_num], reg_addr + 4 * burst_num);

	return MV_OK;
}

int mv_ddr_early_init(void)
{
	/* set up the default SW database, callbacks and device attributes */
	mv_ddr_sw_db_init(0, 0);

	return MV_OK;
}

int mv_ddr_early_init2(void)
{
	mv_ddr_training_mask_set();

	return MV_OK;
}

int mv_ddr_pre_training_fixup(void)
{
	return 0;
}

int mv_ddr_post_training_fixup(void)
{
	return 0;
}

int ddr3_post_run_alg(void)
{
	return MV_OK;
}

int ddr3_silicon_post_init(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* set half bus width */
	if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask)) {
		CHECK_STATUS(ddr3_tip_if_write
			     (0, ACCESS_TYPE_UNICAST, PARAM_NOT_CARE,
			      SDRAM_CFG_REG, 0x0, 0x8000));
	}

	return MV_OK;
}

u32 mv_ddr_init_freq_get(void)
{
	enum mv_ddr_freq freq;

	mv_ddr_sar_freq_get(0, &freq);

	return freq;
}

static u32 ddr3_get_bus_width(void)
{
	u32 bus_width;

	bus_width = (reg_read(SDRAM_CFG_REG) & 0x8000) >>
		BUS_IN_USE_OFFS;

	return (bus_width == 0) ? 16 : 32;
}

static u32 ddr3_get_device_width(u32 cs)
{
	u32 device_width;

	device_width = (reg_read(SDRAM_ADDR_CTRL_REG) &
			(CS_STRUCT_MASK << CS_STRUCT_OFFS(cs))) >>
			CS_STRUCT_OFFS(cs);

	return (device_width == 0) ? 8 : 16;
}

static u32 ddr3_get_device_size(u32 cs)
{
	u32 device_size_low, device_size_high, device_size;
	u32 data, cs_low_offset, cs_high_offset;

	cs_low_offset = CS_SIZE_OFFS(cs);
	cs_high_offset = CS_SIZE_HIGH_OFFS(cs);

	data = reg_read(SDRAM_ADDR_CTRL_REG);
	device_size_low = (data >> cs_low_offset) & 0x3;
	device_size_high = (data >> cs_high_offset) & 0x1;

	device_size = device_size_low | (device_size_high << 2);

	switch (device_size) {
	case 0:
		return 2048;
	case 2:
		return 512;
	case 3:
		return 1024;
	case 4:
		return 4096;
	case 5:
		return 8192;
	case 1:
	default:
		DEBUG_INIT_C("Error: Wrong device size of Cs: ", cs, 1);
		/* zero is returned as an invalid-encoding indication */
		return 0;
	}
}

int ddr3_calc_mem_cs_size(u32 cs, uint64_t *cs_size)
{
	u32 cs_mem_size;

	/* calculate the chip-select size */
	cs_mem_size = ((ddr3_get_bus_width() / ddr3_get_device_width(cs)) *
		       ddr3_get_device_size(cs)) / 8;

	/*
	 * Apply the controller bus-width multiplier (2x for a 64-bit
	 * controller); the bus-in-use bit of SDRAM_CFG_REG only reports
	 * whether the whole bus or half of it is used.
	 */
	cs_mem_size *= DDR_CONTROLLER_BUS_WIDTH_MULTIPLIER;

	if ((cs_mem_size < 128) || (cs_mem_size > 4096)) {
		DEBUG_INIT_C("Error: Wrong Memory size of Cs: ", cs, 1);
		return MV_BAD_VALUE;
	}

	*cs_size = cs_mem_size;

	return MV_OK;
}
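
/*
 * Open a "fast path" window for each active chip-select at its running
 * offset and program the L2 address filter with the total DRAM size.
 */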
static int ddr3_fast_path_dynamic_cs_size_config(u32 cs_ena)
{
	u32 reg, cs;
	uint64_t mem_total_size = 0;
	uint64_t cs_mem_size_mb = 0;
	uint64_t cs_mem_size = 0;
	uint64_t mem_total_size_c, cs_mem_size_c;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	u32 physical_mem_size;
	u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
#endif

	/* open fast path windows */
	for (cs = 0; cs < MAX_CS_NUM; cs++) {
		if (cs_ena & (1 << cs)) {
			/* get the chip-select size */
			if (ddr3_calc_mem_cs_size(cs, &cs_mem_size_mb) != MV_OK)
				return MV_FAIL;
			cs_mem_size = cs_mem_size_mb * _1M;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
			/*
			 * if the number of address pins does not allow the
			 * memory size defined in the topology, cap it at
			 * DEVICE_MAX_DRAM_ADDRESS_SIZE
			 */
			physical_mem_size = mem_size
				[tm->interface_params[0].memory_size];

			if (ddr3_get_device_width(cs) == 16) {
				/*
				 * a 16-bit device can be twice as large; the
				 * least significant address pin is not needed
				 */
				max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
			}

			if (physical_mem_size > max_mem_size) {
				cs_mem_size = max_mem_size *
					(ddr3_get_bus_width() /
					 ddr3_get_device_width(cs));
				printf("Updated physical memory size from 0x%x to 0x%x\n",
				       physical_mem_size,
				       max_mem_size);
			}
#endif

			/* set the fast path window control for the cs */
			reg = 0xffffe1;
			reg |= (cs << 2);
			reg |= (cs_mem_size - 1) & 0xffff0000;
			reg_write(REG_FASTPATH_WIN_CTRL_ADDR(cs), reg);

			/* set the fast path window base address for the cs */
			reg = ((cs_mem_size) * cs) & 0xffff0000;
			reg_write(REG_FASTPATH_WIN_BASE_ADDR(cs), reg);

			/*
			 * since the memory size may exceed 4 GiB, the sum may
			 * not fit a 32-bit word; compare the sizes shifted
			 * right by 16 bits instead
			 */
			mem_total_size_c = (mem_total_size >> 16) & 0xffffffffffff;
			cs_mem_size_c = (cs_mem_size >> 16) & 0xffffffffffff;

			/* accumulate while the sum fits, otherwise clamp */
			if (mem_total_size_c + cs_mem_size_c < 0x10000)
				mem_total_size += cs_mem_size;
			else
				mem_total_size = L2_FILTER_FOR_MAX_MEMORY_SIZE;
		}
	}

	/* set L2 filtering to the total memory size */
	reg_write(ADDRESS_FILTERING_END_REGISTER, mem_total_size);

	return MV_OK;
}

static int ddr3_restore_and_set_final_windows(u32 *win, const char *ddr_type)
{
	u32 win_ctrl_reg, num_of_win_regs;
	u32 cs_ena = mv_ddr_sys_env_get_cs_ena_from_reg();
	u32 ui;

	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
	num_of_win_regs = 16;

	/* restore the saved XBAR window configuration */
	for (ui = 0; ui < num_of_win_regs; ui++)
		reg_write((win_ctrl_reg + 0x4 * ui), win[ui]);

	printf("%s Training Sequence - Switching XBAR Window to FastPath Window\n",
	       ddr_type);

#if defined DYNAMIC_CS_SIZE_CONFIG
	if (ddr3_fast_path_dynamic_cs_size_config(cs_ena) != MV_OK)
		printf("ddr3_fast_path_dynamic_cs_size_config FAILED\n");
#else
	u32 reg, cs;

	reg = 0x1fffffe1;
	for (cs = 0; cs < MAX_CS_NUM; cs++) {
		if (cs_ena & (1 << cs)) {
			reg |= (cs << 2);
			break;
		}
	}
	/* open fast path window 0 only */
	reg_write(REG_FASTPATH_WIN_CTRL_ADDR(0), reg);
#endif

	return MV_OK;
}

static int ddr3_save_and_set_training_windows(u32 *win)
{
	u32 cs_ena;
	u32 reg, tmp_count, cs, ui;
	u32 win_ctrl_reg, win_base_reg, win_remap_reg;
	u32 num_of_win_regs, win_jump_index;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
	win_base_reg = REG_XBAR_WIN_4_BASE_ADDR;
	win_remap_reg = REG_XBAR_WIN_4_REMAP_ADDR;
	win_jump_index = 0x10;
	num_of_win_regs = 16;

#ifdef DISABLE_L2_FILTERING_DURING_DDR_TRAINING
	/*
	 * disable L2 filtering during DDR training
	 * (while the crossbar window is open)
	 */
	reg_write(ADDRESS_FILTERING_END_REGISTER, 0);
#endif

	cs_ena = tm->interface_params[0].as_bus_params[0].cs_bitmask;

	/* close XBAR window 19 - not needed */
	reg_write(REG_XBAR_WIN_19_CTRL_ADDR, 0);

	/* save the XBAR window configuration */
	for (ui = 0; ui < num_of_win_regs; ui++)
		win[ui] = reg_read(win_ctrl_reg + 0x4 * ui);

	/* open XBAR windows 4-7 for the active chip-selects */
	reg = 0;
	tmp_count = 0;
	for (cs = 0; cs < MAX_CS_NUM; cs++) {
		if (cs_ena & (1 << cs)) {
			switch (cs) {
			case 0:
				reg = 0x0e00;
				break;
			case 1:
				reg = 0x0d00;
				break;
			case 2:
				reg = 0x0b00;
				break;
			case 3:
				reg = 0x0700;
				break;
			}
			reg |= (1 << 0);
			reg |= (SDRAM_CS_SIZE & 0xffff0000);

			reg_write(win_ctrl_reg + win_jump_index * tmp_count,
				  reg);
			reg = (((SDRAM_CS_SIZE + 1) * (tmp_count)) &
			       0xffff0000);
			reg_write(win_base_reg + win_jump_index * tmp_count,
				  reg);

			if (win_remap_reg <= REG_XBAR_WIN_7_REMAP_ADDR)
				reg_write(win_remap_reg +
					  win_jump_index * tmp_count, 0);

			tmp_count++;
		}
	}

	return MV_OK;
}
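
/* XBAR window configuration saved across the training sequence */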
static u32 win[16];

int mv_ddr_pre_training_soc_config(const char *ddr_type)
{
	u32 soc_num;
	u32 reg_val;

	/* set the Marvell ID on all active CPU cores */
	soc_num = (reg_read(REG_SAMPLE_RESET_HIGH_ADDR) & SAR1_CPU_CORE_MASK) >>
		SAR1_CPU_CORE_OFFSET;
	switch (soc_num) {
	case 0x3:
		reg_bit_set(CPU_CONFIGURATION_REG(3), CPU_MRVL_ID_OFFSET);
		reg_bit_set(CPU_CONFIGURATION_REG(2), CPU_MRVL_ID_OFFSET);
		/* fallthrough */
	case 0x1:
		reg_bit_set(CPU_CONFIGURATION_REG(1), CPU_MRVL_ID_OFFSET);
		/* fallthrough */
	case 0x0:
		reg_bit_set(CPU_CONFIGURATION_REG(0), CPU_MRVL_ID_OFFSET);
		/* fallthrough */
	default:
		break;
	}

	/*
	 * Set the DRAM reset mask if a GPIO indicates wakeup from suspend,
	 * so the DRAM contents are not overwritten or reset.
	 */
	if (mv_ddr_sys_env_suspend_wakeup_check() ==
	    SUSPEND_WAKEUP_ENABLED_GPIO_DETECTED) {
		reg_bit_set(SDRAM_INIT_CTRL_REG,
			    DRAM_RESET_MASK_MASKED << DRAM_RESET_MASK_OFFS);
	}

	/* check whether DRAM is already initialized */
	if (reg_read(REG_BOOTROM_ROUTINE_ADDR) &
	    (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS)) {
		printf("%s Training Sequence - 2nd boot - Skip\n", ddr_type);
		return MV_OK;
	}

	/* fix the read-ready phase increments for 2:1 mode */
	reg_val = reg_read(TRAINING_DBG_3_REG);

	reg_val &= ~(TRN_DBG_RDY_INC_PH_2TO1_MASK << TRN_DBG_RDY_INC_PH_2TO1_OFFS(0));
	reg_val |= (0x4 << TRN_DBG_RDY_INC_PH_2TO1_OFFS(0));	/* phase 0 */

	reg_val &= ~(TRN_DBG_RDY_INC_PH_2TO1_MASK << TRN_DBG_RDY_INC_PH_2TO1_OFFS(1));
	reg_val |= (0x4 << TRN_DBG_RDY_INC_PH_2TO1_OFFS(1));	/* phase 1 */

	reg_val &= ~(TRN_DBG_RDY_INC_PH_2TO1_MASK << TRN_DBG_RDY_INC_PH_2TO1_OFFS(3));
	reg_val |= (0x6 << TRN_DBG_RDY_INC_PH_2TO1_OFFS(3));	/* phase 3 */

	reg_val &= ~(TRN_DBG_RDY_INC_PH_2TO1_MASK << TRN_DBG_RDY_INC_PH_2TO1_OFFS(4));
	reg_val |= (0x6 << TRN_DBG_RDY_INC_PH_2TO1_OFFS(4));	/* phase 4 */

	reg_val &= ~(TRN_DBG_RDY_INC_PH_2TO1_MASK << TRN_DBG_RDY_INC_PH_2TO1_OFFS(5));
	reg_val |= (0x6 << TRN_DBG_RDY_INC_PH_2TO1_OFFS(5));	/* phase 5 */

	reg_write(TRAINING_DBG_3_REG, reg_val);

	/* reset the AXI control register */
	reg_write(AXI_CTRL_REG, 0);

	/* save the XBAR windows and set the windows used during training */
	ddr3_save_and_set_training_windows(win);

	return MV_OK;
}

static int ddr3_new_tip_dlb_config(void)
{
	u32 reg, i = 0;
	struct dlb_config *config_table_ptr = sys_env_dlb_config_ptr_get();

	/* write the configuration table up to the zero delimiter */
	while (config_table_ptr[i].reg_addr != 0) {
		reg_write(config_table_ptr[i].reg_addr,
			  config_table_ptr[i].reg_data);
		i++;
	}

	/* enable DLB */
	reg = reg_read(DLB_CTRL_REG);
	reg &= ~(DLB_EN_MASK << DLB_EN_OFFS) &
	       ~(WR_COALESCE_EN_MASK << WR_COALESCE_EN_OFFS) &
	       ~(AXI_PREFETCH_EN_MASK << AXI_PREFETCH_EN_OFFS) &
	       ~(MBUS_PREFETCH_EN_MASK << MBUS_PREFETCH_EN_OFFS) &
	       ~(PREFETCH_NXT_LN_SZ_TRIG_MASK << PREFETCH_NXT_LN_SZ_TRIG_OFFS);

	reg |= (DLB_EN_ENA << DLB_EN_OFFS) |
	       (WR_COALESCE_EN_ENA << WR_COALESCE_EN_OFFS) |
	       (AXI_PREFETCH_EN_ENA << AXI_PREFETCH_EN_OFFS) |
	       (MBUS_PREFETCH_EN_ENA << MBUS_PREFETCH_EN_OFFS) |
	       (PREFETCH_NXT_LN_SZ_TRIG_ENA << PREFETCH_NXT_LN_SZ_TRIG_OFFS);

	reg_write(DLB_CTRL_REG, reg);

	return MV_OK;
}

int mv_ddr_post_training_soc_config(const char *ddr_type)
{
	u32 reg_val;

	/* restore the saved windows and set the final ones */
	ddr3_restore_and_set_final_windows(win, ddr_type);

	/* update the DRAM init indication in the bootROM register */
	reg_val = reg_read(REG_BOOTROM_ROUTINE_ADDR);
	reg_write(REG_BOOTROM_ROUTINE_ADDR,
		  reg_val | (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS));

	/* DLB config */
	ddr3_new_tip_dlb_config();

	return MV_OK;
}

void mv_ddr_mc_config(void)
{
	/* memory controller initialization */
	struct init_cntr_param init_param;
	int status;

	init_param.do_mrs_phy = 1;
	init_param.is_ctrl64_bit = 0;
	init_param.init_phy = 1;
	init_param.msys_init = 1;
	status = hws_ddr3_tip_init_controller(0, &init_param);
	if (status != MV_OK)
		printf("DDR3 init controller - FAILED 0x%x\n", status);

	status = mv_ddr_mc_init();
	if (status != MV_OK)
		printf("DDR3 init_sequence - FAILED 0x%x\n", status);
}

/* enable the D-unit after the init controller configuration */
int mv_ddr_mc_init(void)
{
	CHECK_STATUS(ddr3_tip_enable_init_sequence(0));

	return MV_OK;
}
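
/*
 * Configure the DDR PHY pads: ZRI and ODT calibration values, the
 * PAD_PRE_DISABLE and CMOS configuration registers, and the per-PHY
 * clamp and vref settings.
 */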
int ddr3_tip_configure_phy(u32 dev_num)
{
	u32 if_id, phy_id;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
		      PAD_ZRI_CAL_PHY_REG,
		      ((0x7f & g_zpri_data) << 7 | (0x7f & g_znri_data))));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_CONTROL,
		      PAD_ZRI_CAL_PHY_REG,
		      ((0x7f & g_zpri_ctrl) << 7 | (0x7f & g_znri_ctrl))));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
		      PAD_ODT_CAL_PHY_REG,
		      ((0x3f & g_zpodt_data) << 6 | (0x3f & g_znodt_data))));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_CONTROL,
		      PAD_ODT_CAL_PHY_REG,
		      ((0x3f & g_zpodt_ctrl) << 6 | (0x3f & g_znodt_ctrl))));

	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
		      PAD_PRE_DISABLE_PHY_REG, 0));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
		      CMOS_CONFIG_PHY_REG, 0));
	CHECK_STATUS(ddr3_tip_bus_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_CONTROL,
		      CMOS_CONFIG_PHY_REG, 0));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		/* check if the interface is enabled */
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		for (phy_id = 0;
		     phy_id < octets_per_if_num;
		     phy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, phy_id);
			/* set clamp and vref initial values */
			CHECK_STATUS(ddr3_tip_bus_read_modify_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, phy_id, DDR_PHY_DATA,
				      PAD_CFG_PHY_REG,
				      ((clamp_tbl[if_id] << 4) | vref_init_val),
				      ((0x7 << 4) | 0x7)));

			CHECK_STATUS(ddr3_tip_bus_read_modify_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, phy_id, DDR_PHY_CONTROL,
				      PAD_CFG_PHY_REG, 0x4, 0x7));
		}
	}

	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_PHY_EDGE) ==
	    MV_DDR_PHY_EDGE_POSITIVE)
		CHECK_STATUS(ddr3_tip_bus_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      DDR_PHY_DATA, 0x90, 0x6002));

	return MV_OK;
}

/* no manual calibration flow on this platform */
int mv_ddr_manual_cal_do(void)
{
	return 0;
}