#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

u8 is_reg_dump = 0;
u8 debug_pbs = DEBUG_LEVEL_ERROR;

/*
 * Debug-log verbosity control; compiled out when SILENT_LIB is defined
 */
#ifndef SILENT_LIB
/* Per-module debug verbosity levels */
u8 debug_training_static = DEBUG_LEVEL_ERROR;
u8 debug_training = DEBUG_LEVEL_ERROR;
u8 debug_leveling = DEBUG_LEVEL_ERROR;
u8 debug_centralization = DEBUG_LEVEL_ERROR;
u8 debug_training_ip = DEBUG_LEVEL_ERROR;
u8 debug_training_bist = DEBUG_LEVEL_ERROR;
u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
u8 debug_training_access = DEBUG_LEVEL_ERROR;
u8 debug_training_a38x = DEBUG_LEVEL_ERROR;

void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	switch (block) {
	case DEBUG_BLOCK_STATIC:
		debug_training_static = level;
		break;
	case DEBUG_BLOCK_TRAINING_MAIN:
		debug_training = level;
		break;
	case DEBUG_BLOCK_LEVELING:
		debug_leveling = level;
		break;
	case DEBUG_BLOCK_CENTRALIZATION:
		debug_centralization = level;
		break;
	case DEBUG_BLOCK_PBS:
		debug_pbs = level;
		break;
	case DEBUG_BLOCK_ALG:
		debug_training_hw_alg = level;
		break;
	case DEBUG_BLOCK_DEVICE:
		debug_training_a38x = level;
		break;
	case DEBUG_BLOCK_ACCESS:
		debug_training_access = level;
		break;
	case DEBUG_STAGES_REG_DUMP:
		if (level == DEBUG_LEVEL_TRACE)
			is_reg_dump = 1;
		else
			is_reg_dump = 0;
		break;
	case DEBUG_BLOCK_ALL:
	default:
		debug_training_static = level;
		debug_training = level;
		debug_leveling = level;
		debug_centralization = level;
		debug_pbs = level;
		debug_training_hw_alg = level;
		debug_training_access = level;
		debug_training_a38x = level;
	}
}
#else
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	return;
}
#endif

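/* Device attributes and debug/tuning state shared by the routines below */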
struct hws_tip_config_func_db config_func_info[HWS_MAX_DEVICE_NUM];
u8 is_default_centralization = 0;
u8 is_tune_result = 0;
u8 is_validate_window_per_if = 0;
u8 is_validate_window_per_pup = 0;
u8 sweep_cnt = 1;
u32 is_bist_reset_bit = 1;
static struct hws_xsb_info xsb_info[HWS_MAX_DEVICE_NUM];

/*
 * Dump the D-unit and PHY (data and control) registers of all active
 * interfaces
 */
int ddr3_tip_reg_dump(u32 dev_num)
{
	u32 if_id, reg_addr, data_value, bus_id;
	u32 read_data[MAX_INTERFACE_NUM];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	printf("-- dunit registers --\n");
	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, reg_addr, read_data,
				      MASK_ALL_BITS));
			printf("0x%x ", read_data[if_id]);
		}
		printf("\n");
	}

	printf("-- Phy registers --\n");
	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			for (bus_id = 0;
			     bus_id < tm->num_of_bus_per_interface;
			     bus_id++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_DATA, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
			for (bus_id = 0;
			     bus_id < tm->num_of_bus_per_interface;
			     bus_id++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_CONTROL, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
		}
		printf("\n");
	}

	return MV_OK;
}

/*
 * Register the platform callback functions for a device
 */
int ddr3_tip_init_config_func(u32 dev_num,
			      struct hws_tip_config_func_db *config_func)
{
	if (config_func == NULL)
		return MV_BAD_PARAM;

	memcpy(&config_func_info[dev_num], config_func,
	       sizeof(struct hws_tip_config_func_db));

	return MV_OK;
}

/*
 * Get a pointer to the per-interface results of a training stage
 */
enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
{
	return training_result[stage];
}

/*
 * Get device info through the registered callback, if one was provided
 */
int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
{
	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
		return config_func_info[dev_num].
			tip_get_device_info_func((u8)dev_num, info_ptr);
	}

	return MV_FAIL;
}

#ifndef EXCLUDE_SWITCH_DEBUG

/* Convert a DDR frequency enum value to a printable string */
static char *convert_freq(enum hws_ddr_freq freq)
{
	switch (freq) {
	case DDR_FREQ_LOW_FREQ:
		return "DDR_FREQ_LOW_FREQ";
	case DDR_FREQ_400:
		return "400";
	case DDR_FREQ_533:
		return "533";
	case DDR_FREQ_667:
		return "667";
	case DDR_FREQ_800:
		return "800";
	case DDR_FREQ_933:
		return "933";
	case DDR_FREQ_1066:
		return "1066";
	case DDR_FREQ_311:
		return "311";
	case DDR_FREQ_333:
		return "333";
	case DDR_FREQ_467:
		return "467";
	case DDR_FREQ_850:
		return "850";
	case DDR_FREQ_900:
		return "900";
	case DDR_FREQ_360:
		return "DDR_FREQ_360";
	case DDR_FREQ_1000:
		return "DDR_FREQ_1000";
	default:
		return "Unknown Frequency";
	}
}

/* Convert a device ID to a printable device name */
static char *convert_dev_id(u32 dev_id)
{
	switch (dev_id) {
	case 0x6800:
		return "A38xx";
	case 0x6900:
		return "A39XX";
	case 0xf400:
		return "AC3";
	case 0xfc00:
		return "BC2";
	default:
		return "Unknown Device";
	}
}

/* Convert a memory-size code to a printable string */
static char *convert_mem_size(u32 dev_id)
{
	switch (dev_id) {
	case 0:
		return "512 MB";
	case 1:
		return "1 GB";
	case 2:
		return "2 GB";
	case 3:
		return "4 GB";
	case 4:
		return "8 GB";
	default:
		return "wrong mem size";
	}
}

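/* Print the device ID, CK delay and topology of the given device */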
int print_device_info(u8 dev_num)
{
	struct ddr3_device_info info_ptr;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
	printf("=== DDR setup START===\n");
	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
	printf("\tDDR3 CK delay: %d\n", info_ptr.ck_delay);
	print_topology(tm);
	printf("=== DDR setup END===\n");

	return MV_OK;
}

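/*
 * Enable/disable the validate-window sweep that runs as part of the
 * training log; enabling also raises the training debug level to TRACE
 */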
void hws_ddr3_tip_sweep_test(int enable)
{
	if (enable) {
		is_validate_window_per_if = 1;
		is_validate_window_per_pup = 1;
		debug_training = DEBUG_LEVEL_TRACE;
	} else {
		is_validate_window_per_if = 0;
		is_validate_window_per_pup = 0;
	}
}
#endif

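/* Convert a training stage result code to a printable string */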
char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
{
	switch (tune_result) {
	case TEST_FAILED:
		return "FAILED";
	case TEST_SUCCESS:
		return "PASS";
	case NO_TEST_DONE:
		return "NOT COMPLETED";
	default:
		return "UNKNOWN";
	}
}

/*
 * Print the training results log for every active interface; in debug
 * builds the validate-window sweep is run first when enabled
 */
int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
{
	u32 if_id = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	mem_addr = mem_addr;

#ifndef EXCLUDE_SWITCH_DEBUG
	if ((is_validate_window_per_if != 0) ||
	    (is_validate_window_per_pup != 0)) {
		u32 is_pup_log = 0;
		enum hws_ddr_freq freq;

		freq = tm->interface_params[first_active_if].memory_freq;

		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
		printf("===VALIDATE WINDOW LOG START===\n");
		printf("DDR Frequency: %s ======\n", convert_freq(freq));

		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
		ddr3_tip_print_all_pbs_result(dev_num);
		ddr3_tip_print_wl_supp_result(dev_num);
		printf("===VALIDATE WINDOW LOG END ===\n");
		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
		ddr3_tip_reg_dump(dev_num);
	}
#endif

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);

		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("IF %d Status:\n", if_id));

		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tInit Controller: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[INIT_CONTROLLER]
					    [if_id])));
		}
		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLow freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_LOW_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN]
					    [if_id])));
		}
		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tMedium freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_MEDIUM_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN_2]
					    [if_id])));
		}
		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_SUPP]
					    [if_id])));
		}
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_RX]
					    [if_id])));
		}
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_TX]
					    [if_id])));
		}
		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tTarget freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_TARGET_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_TF]
					    [if_id])));
		}
		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING_TF]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result
					    [WRITE_LEVELING_SUPP_TF]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_RX]
					    [if_id])));
		}
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tVREF_CALIBRATION: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_TX]
					    [if_id])));
		}
	}

	return MV_OK;
}

/*
 * Print a CSV-style stability log: calibration, leveling, centralization,
 * Vref and PBS values per interface, chip-select and bus
 */
int ddr3_tip_print_stability_log(u32 dev_num)
{
	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
	u32 reg_data;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* print the CSV title rows */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			printf("\n");
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSTx-Pad%d,", idx);
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSRx-Pad%d,", idx);
		}
	}
	printf("\n");

	/* print the data rows */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);

		printf("Data: %d,%d,", if_id,
		       (config_func_info[dev_num].tip_get_temperature != NULL)
		       ? (config_func_info[dev_num].
			  tip_get_temperature(dev_num)) : (0));

		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
		       ((read_data[if_id] & 0xfc00000) >> 22));

		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
				printf("\n");
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_DB_PHY_REG_ADDR +
						  csindex, &reg_data);
				printf("%d,%d,", (reg_data & 0x1f),
				       ((reg_data & 0x3e0) >> 5));
				/* write leveling */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  WL_PHY_REG +
						  csindex * 4, &reg_data);
				printf("%d,%d,%d,",
				       (reg_data & 0x1f) +
				       ((reg_data & 0x1c0) >> 6) * 32,
				       (reg_data & 0x1f),
				       (reg_data & 0x1c0) >> 6);
				/* read leveling */
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id,
					      READ_DATA_SAMPLE_DELAY,
					      read_data, MASK_ALL_BITS));
				read_data[if_id] =
					(read_data[if_id] &
					 (0xf << (4 * csindex))) >>
					(4 * csindex);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  RL_PHY_REG + csindex * 4,
						  &reg_data);
				printf("%d,%d,%d,%d,",
				       (reg_data & 0x1f) +
				       ((reg_data & 0x1c0) >> 6) * 32 +
				       read_data[if_id] * 64,
				       (reg_data & 0x1f),
				       ((reg_data & 0x1c0) >> 6),
				       read_data[if_id]);
				/* centralization */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  WRITE_CENTRALIZATION_PHY_REG
						  + csindex * 4, &reg_data);
				printf("%d,", (reg_data & 0x3f));
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  READ_CENTRALIZATION_PHY_REG
						  + csindex * 4, &reg_data);
				printf("%d,", (reg_data & 0x1f));
				/* vref */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  PAD_CONFIG_PHY_REG,
						  &reg_data);
				printf("%d,", (reg_data & 0x7));
				/* DQVref column: printed as constant 0 here */
				printf("%d,", 0);
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0xd0 +
							  12 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0x10 +
							  16 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0x50 +
							  16 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
			}
		}
	}
	printf("\n");

	return MV_OK;
}

/* Register the XSB info table for a device */
int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
{
	memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
	return MV_OK;
}

/*
 * Read an ADLL/PHY register from all active interfaces and buses into
 * pup_values[]
 */
int read_adll_value(u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
		    int reg_addr, u32 mask)
{
	u32 data_value;
	u32 if_id = 0, bus_id = 0;
	u32 dev_num = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * Read the selected PHY register from every active bus of every
	 * active interface and store the masked value per (if, bus) pair.
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
		     bus_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       ACCESS_TYPE_UNICAST,
						       bus_id,
						       DDR_PHY_DATA, reg_addr,
						       &data_value));
			pup_values[if_id *
				   tm->num_of_bus_per_interface + bus_id] =
				data_value & mask;
		}
	}

	return 0;
}

/*
 * Write pup_values[] back to an ADLL/PHY register on all active interfaces
 * and buses
 */
int write_adll_value(u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
		     int reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 dev_num = 0, data;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * Write the stored per-(if, bus) value back to the selected PHY
	 * register on every active bus of every active interface.
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
		     bus_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id *
					  tm->num_of_bus_per_interface +
					  bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id, DDR_PHY_DATA,
							reg_addr, data));
		}
	}

	return 0;
}

#ifndef EXCLUDE_SWITCH_DEBUG
u32 rl_version = 1;
struct hws_tip_config_func_db config_func_info[HWS_MAX_DEVICE_NUM];
u32 start_xsb_offset = 0;
u8 is_rl_old = 0;
u8 is_freq_old = 0;
u8 is_dfs_disabled = 0;
u32 default_centrlization_value = 0x12;
u32 vref = 0x4;
u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
	rl_test = 0, reset_read_fifo = 0;
int debug_acc = 0;
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
u8 cs_mask_reg[] = {
	0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

u32 xsb_test_table[][8] = {
	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
	 0x66666666, 0x77777777},
	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
	 0xeeeeeeee, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
	 0xffffffff, 0xffffffff},
	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
	 0x00000000, 0x00000000},
	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
	 0xffffffff, 0xffffffff}
};

static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr);

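/*
 * Dump PHY data registers 0x1, 0x2 and 0x3 for every active interface and
 * bus at TRACE debug level
 */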
int ddr3_tip_print_adll(void)
{
	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
		     bus_cnt++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id,
				      ACCESS_TYPE_UNICAST, bus_cnt,
				      DDR_PHY_DATA, 0x1, &data_p1));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  (" IF %d bus_cnt %d phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
					   if_id, bus_cnt, data_p1, data_p2,
					   ui_data3));
		}
	}

	return MV_OK;
}

/*
 * Set an attribute flag: resolve the flag ID to the variable it controls
 * and update it
 */
int ddr3_tip_set_atr(u32 dev_num, u32 flag_id, u32 value)
{
	int ret;
	u32 *ptr_flag = NULL;

	ret = ddr3_tip_access_atr(dev_num, flag_id, value, &ptr_flag);
	if (ptr_flag != NULL) {
		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x (was 0x%x)\n",
		       flag_id, value, *ptr_flag);
		*ptr_flag = value;
	} else {
		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x\n",
		       flag_id, value);
	}

	return ret;
}

/*
 * Map a flag ID to the address of the variable or topology field it
 * controls; a few debug-level IDs apply the value directly instead
 */
static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr)
{
	u32 tmp_val = 0, if_id = 0, pup_id = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	dev_num = dev_num;
	*ptr = NULL;

	switch (flag_id) {
	case 0:
		*ptr = (u32 *)&(tm->if_act_mask);
		break;

	case 0x1:
		*ptr = (u32 *)&mask_tune_func;
		break;

	case 0x2:
		*ptr = (u32 *)&low_freq;
		break;

	case 0x3:
		*ptr = (u32 *)&medium_freq;
		break;

	case 0x4:
		*ptr = (u32 *)&generic_init_controller;
		break;

	case 0x5:
		*ptr = (u32 *)&rl_version;
		break;

	case 0x8:
		*ptr = (u32 *)&start_xsb_offset;
		break;

	case 0x20:
		*ptr = (u32 *)&is_rl_old;
		break;

	case 0x21:
		*ptr = (u32 *)&is_freq_old;
		break;

	case 0x23:
		*ptr = (u32 *)&is_dfs_disabled;
		break;

	case 0x24:
		*ptr = (u32 *)&is_pll_before_init;
		break;

	case 0x25:
		*ptr = (u32 *)&is_adll_calib_before_init;
		break;
#ifdef STATIC_ALGO_SUPPORT
	case 0x26:
		*ptr = (u32 *)&(silicon_delay[0]);
		break;

	case 0x27:
		*ptr = (u32 *)&wl_debug_delay;
		break;
#endif
	case 0x28:
		*ptr = (u32 *)&is_tune_result;
		break;

	case 0x29:
		*ptr = (u32 *)&is_validate_window_per_if;
		break;

	case 0x2a:
		*ptr = (u32 *)&is_validate_window_per_pup;
		break;

	case 0x30:
		*ptr = (u32 *)&sweep_cnt;
		break;

	case 0x31:
		*ptr = (u32 *)&is_bist_reset_bit;
		break;

	case 0x32:
		*ptr = (u32 *)&is_dfs_in_init;
		break;

	case 0x33:
		*ptr = (u32 *)&p_finger;
		break;

	case 0x34:
		*ptr = (u32 *)&n_finger;
		break;

	case 0x35:
		*ptr = (u32 *)&init_freq;
		break;

	case 0x36:
		*ptr = (u32 *)&(freq_val[DDR_FREQ_LOW_FREQ]);
		break;

	case 0x37:
		*ptr = (u32 *)&start_pattern;
		break;

	case 0x38:
		*ptr = (u32 *)&end_pattern;
		break;

	case 0x39:
		*ptr = (u32 *)&phy_reg0_val;
		break;

	case 0x4a:
		*ptr = (u32 *)&phy_reg1_val;
		break;

	case 0x4b:
		*ptr = (u32 *)&phy_reg2_val;
		break;

	case 0x4c:
		*ptr = (u32 *)&phy_reg3_val;
		break;

	case 0x4e:
		*ptr = (u32 *)&sweep_pattern;
		break;

	case 0x50:
		*ptr = (u32 *)&is_rzq6;
		break;

	case 0x51:
		*ptr = (u32 *)&znri_data_phy_val;
		break;

	case 0x52:
		*ptr = (u32 *)&zpri_data_phy_val;
		break;

	case 0x53:
		*ptr = (u32 *)&finger_test;
		break;

	case 0x54:
		*ptr = (u32 *)&n_finger_start;
		break;

	case 0x55:
		*ptr = (u32 *)&n_finger_end;
		break;

	case 0x56:
		*ptr = (u32 *)&p_finger_start;
		break;

	case 0x57:
		*ptr = (u32 *)&p_finger_end;
		break;

	case 0x58:
		*ptr = (u32 *)&p_finger_step;
		break;

	case 0x59:
		*ptr = (u32 *)&n_finger_step;
		break;

	case 0x5a:
		*ptr = (u32 *)&znri_ctrl_phy_val;
		break;

	case 0x5b:
		*ptr = (u32 *)&zpri_ctrl_phy_val;
		break;

	case 0x5c:
		*ptr = (u32 *)&is_reg_dump;
		break;

	case 0x5d:
		*ptr = (u32 *)&vref;
		break;

	case 0x5e:
		*ptr = (u32 *)&mode2_t;
		break;

	case 0x5f:
		*ptr = (u32 *)&xsb_validate_type;
		break;

	case 0x60:
		*ptr = (u32 *)&xsb_validation_base_address;
		break;

	case 0x67:
		*ptr = (u32 *)&activate_select_before_run_alg;
		break;

	case 0x68:
		*ptr = (u32 *)&activate_deselect_after_run_alg;
		break;

	case 0x69:
		*ptr = (u32 *)&odt_additional;
		break;

	case 0x70:
		*ptr = (u32 *)&debug_mode;
		break;

	case 0x71:
		*ptr = (u32 *)&pbs_pattern;
		break;

	case 0x72:
		*ptr = (u32 *)&delay_enable;
		break;

	case 0x73:
		*ptr = (u32 *)&ck_delay;
		break;

	case 0x74:
		*ptr = (u32 *)&ck_delay_16;
		break;

	case 0x75:
		*ptr = (u32 *)&ca_delay;
		break;

	case 0x100:
		*ptr = (u32 *)&debug_dunit;
		break;

	case 0x101:
		debug_acc = (int)value;
		break;

	case 0x102:
		debug_training = (u8)value;
		break;

	case 0x103:
		debug_training_bist = (u8)value;
		break;

	case 0x104:
		debug_centralization = (u8)value;
		break;

	case 0x105:
		debug_training_ip = (u8)value;
		break;

	case 0x106:
		debug_leveling = (u8)value;
		break;

	case 0x107:
		debug_pbs = (u8)value;
		break;

	case 0x108:
		debug_training_static = (u8)value;
		break;

	case 0x109:
		debug_training_access = (u8)value;
		break;

	case 0x112:
		*ptr = &start_pattern;
		break;

	case 0x113:
		*ptr = &end_pattern;
		break;

	default:
		if ((flag_id >= 0x200) && (flag_id < 0x210)) {
			if_id = flag_id - 0x200;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].memory_freq);
		} else if ((flag_id >= 0x210) && (flag_id < 0x220)) {
			if_id = flag_id - 0x210;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].speed_bin_index);
		} else if ((flag_id >= 0x220) && (flag_id < 0x230)) {
			if_id = flag_id - 0x220;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].bus_width);
		} else if ((flag_id >= 0x230) && (flag_id < 0x240)) {
			if_id = flag_id - 0x230;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].memory_size);
		} else if ((flag_id >= 0x240) && (flag_id < 0x250)) {
			if_id = flag_id - 0x240;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].cas_l);
		} else if ((flag_id >= 0x250) && (flag_id < 0x260)) {
			if_id = flag_id - 0x250;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].cas_wl);
		} else if ((flag_id >= 0x270) && (flag_id < 0x2cf)) {
			if_id = (flag_id - 0x270) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x270) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params[if_id].
					as_bus_params[pup_id].is_ck_swap);
		} else if ((flag_id >= 0x2d0) && (flag_id < 0x32f)) {
			if_id = (flag_id - 0x2d0) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x2d0) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params[if_id].
					as_bus_params[pup_id].is_dqs_swap);
		} else if ((flag_id >= 0x330) && (flag_id < 0x38f)) {
			if_id = (flag_id - 0x330) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x330) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params[if_id].
					as_bus_params[pup_id].cs_bitmask);
		} else if ((flag_id >= 0x390) && (flag_id < 0x3ef)) {
			if_id = (flag_id - 0x390) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x390) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].as_bus_params
					[pup_id].mirror_enable_bitmask);
		} else if ((flag_id >= 0x500) && (flag_id <= 0x50f)) {
			tmp_val = flag_id - 0x320;
			*ptr = (u32 *)&(clamp_tbl[tmp_val]);
		} else {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("flag_id out of boundary %d\n",
					   flag_id));
			return MV_BAD_PARAM;
		}
	}

	return MV_OK;
}

#ifndef EXCLUDE_SWITCH_DEBUG
/* Print one ADLL value per active (interface, bus) pair */
int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
{
	u32 i, j;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	dev_num = dev_num;

	for (j = 0; j < tm->num_of_bus_per_interface; j++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++) {
			printf("%d ,",
			       adll[i * tm->num_of_bus_per_interface + j]);
		}
	}
	printf("\n");

	return MV_OK;
}
#endif

/* Compare received data against the expected pattern, masked to the selected byte */
static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
			    u32 byte_index)
{
	u32 burst_cnt = 0, addr_offset, i_id;
	int b_is_fail = 0;

	addr_offset =
		(byte_index ==
		 0xff) ? (u32)0xffffffff : (u32)(0xff << (byte_index * 8));
	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
		if ((p_src[burst_cnt] & addr_offset) !=
		    (p_dst[burst_cnt] & addr_offset))
			b_is_fail = 1;
	}

	if (b_is_fail == 1) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("IF %d exp: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("0x%8x ", p_src[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("\n_i_f %d rcv: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("(0x%8x ", p_dst[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
	}

	return b_is_fail;
}

/* Sweep the TX (test_type 0) or RX (test_type 1) ADLL over its full range */
int ddr3_tip_sweep_test(u32 dev_num, u32 test_type,
			u32 mem_addr, u32 is_modify_adll,
			u32 start_if, u32 end_if, u32 startpup, u32 endpup)
{
	u32 bus_cnt = 0, adll_val = 0, if_id, ui_prev_adll, ui_mask_bit,
		end_adll, start_adll;
	u32 reg_addr = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	mem_addr = mem_addr;

	if (test_type == 0) {
		reg_addr = 1;
		ui_mask_bit = 0x3f;
		start_adll = 0;
		end_adll = ui_mask_bit;
	} else {
		reg_addr = 3;
		ui_mask_bit = 0x1f;
		start_adll = 0;
		end_adll = ui_mask_bit;
	}

	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
			  ("==============================\n"));
	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
			  ("Test type %d (0-tx, 1-rx)\n", test_type));

	for (if_id = start_if; if_id <= end_if; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = startpup; bus_cnt < endpup; bus_cnt++) {
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, reg_addr,
				      &ui_prev_adll));

			for (adll_val = start_adll; adll_val <= end_adll;
			     adll_val++) {
				if (is_modify_adll == 1) {
					CHECK_STATUS(ddr3_tip_bus_read_modify_write
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id, bus_cnt,
						      DDR_PHY_DATA, reg_addr,
						      adll_val, ui_mask_bit));
				}
			}
			if (is_modify_adll == 1) {
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA, reg_addr,
					      ui_prev_adll));
			}
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("\n"));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("\n"));
	}

	return MV_OK;
}

#ifndef EXCLUDE_SWITCH_DEBUG

/*
 * Sweep the TX/RX centralization ADLL, run BIST at every step and print the
 * per-ADLL results; the original ADLL values are restored afterwards
 */
int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
			    u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0;
	u32 adll = 0;
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int if_id = 0;
	u32 adll_value = 0;
	int reg = (direction == 0) ? WRITE_CENTRALIZATION_PHY_REG :
		READ_CENTRALIZATION_PHY_REG;
	enum hws_access_type pup_access;
	u32 cs;
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	repeat_num = repeat_num;

	if (mode == 1) {
		/* per-PHY (pup) sweep */
		start_pup = 0;
		end_pup = tm->num_of_bus_per_interface - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE
					(tm->if_act_mask,
					 if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					ctrl_sweepres[adll][if_id][pup] =
						0;
				}
			}
		}

		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
			ctrl_adll[adll] = 0;

		/* save the current ADLL values before the sweep */
		read_adll_value(ctrl_adll,
				(reg + (cs * CS_REGISTER_ADDR_OFFSET)),
				MASK_ALL_BITS);

		/*
		 * sweep the ADLL over its full range and run BIST at every
		 * step, recording the per-interface result
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				adll_value =
					(direction == 0) ? (adll * 2) : adll;
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_MULTICAST, 0,
					      pup_access, pup, DDR_PHY_DATA,
					      reg + CS_REG_VALUE(cs),
					      adll_value));
				hws_ddr3_run_bist(dev_num, sweep_pattern, res,
						  cs);

				for (if_id = 0;
				     if_id <= MAX_INTERFACE_NUM - 1;
				     if_id++) {
					VALIDATE_ACTIVE
						(tm->if_act_mask,
						 if_id);
					ctrl_sweepres[adll][if_id][pup]
						= res[if_id];
					if (mode == 1) {
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST,
							  pup,
							  DDR_PHY_DATA,
							  reg + CS_REG_VALUE(cs),
							  ctrl_adll[if_id *
								    cs *
								    tm->num_of_bus_per_interface
								    + pup]));
					}
				}
			}
		}
		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
		       ((direction == 0) ? "TX" : "RX"));
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? (adll * 2) : adll;
			printf("Final,%s, Sweep, Result, %d ,",
			       ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					printf("%d , ",
					       ctrl_sweepres[adll][if_id]
					       [pup]);
				}
			}
			printf("\n");
		}

		/* restore the ADLL values saved before the sweep */
		write_adll_value(ctrl_adll,
				 (reg + cs * CS_REGISTER_ADDR_OFFSET));

		read_adll_value(ctrl_adll, (reg + cs * CS_REGISTER_ADDR_OFFSET),
				MASK_ALL_BITS);
		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}

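/* Pretty-print the topology map: per-interface and per-bus parameters */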
void print_topology(struct hws_topology_map *topology_db)
{
	u32 ui, uj;

	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
	printf("\tNum Bus: %d\n", topology_db->num_of_bus_per_interface);
	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);

	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
		VALIDATE_ACTIVE(topology_db->if_act_mask, ui);
		printf("\n\tInterface ID: %d\n", ui);
		printf("\t\tDDR Frequency: %s\n",
		       convert_freq(topology_db->
				    interface_params[ui].memory_freq));
		printf("\t\tSpeed_bin: %d\n",
		       topology_db->interface_params[ui].speed_bin_index);
		printf("\t\tBus_width: %d\n",
		       (4 << topology_db->interface_params[ui].bus_width));
		printf("\t\tMem_size: %s\n",
		       convert_mem_size(topology_db->
					interface_params[ui].memory_size));
		printf("\t\tCAS-WL: %d\n",
		       topology_db->interface_params[ui].cas_wl);
		printf("\t\tCAS-L: %d\n",
		       topology_db->interface_params[ui].cas_l);
		printf("\t\tTemperature: %d\n",
		       topology_db->interface_params[ui].interface_temp);
		printf("\n");
		for (uj = 0; uj < 4; uj++) {
			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
			       topology_db->interface_params[ui].
			       as_bus_params[uj].cs_bitmask);
			printf("Mirror: 0x%x\t",
			       topology_db->interface_params[ui].
			       as_bus_params[uj].mirror_enable_bitmask);
			printf("DQS Swap is %s \t",
			       (topology_db->
				interface_params[ui].as_bus_params[uj].
				is_dqs_swap == 1) ? "enabled" : "disabled");
			printf("Ck Swap:%s\t",
			       (topology_db->
				interface_params[ui].as_bus_params[uj].
				is_ck_swap == 1) ? "enabled" : "disabled");
			printf("\n");
		}
	}
}
#endif

/*
 * XSB external-memory test: write and/or read the xsb_test_table patterns
 * over a burst of addresses and compare the results
 */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	u32 seq = 0, if_id = 0, addr, cnt;
	int ret = MV_OK, ret_tmp;
	u32 data_read[MAX_INTERFACE_NUM];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		addr = mem_addr;
		for (cnt = 0; cnt <= burst_length; cnt++) {
			seq = (seq + 1) % 8;
			if (write_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_write
					     (dev_num, if_id, addr, 1,
					      xsb_test_table[seq]));
			}
			if (read_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_read
					     (dev_num, if_id, addr, 1,
					      data_read));
			}
			if ((read_type != 0) && (write_type != 0)) {
				ret_tmp =
					ddr3_tip_compare(if_id,
							 xsb_test_table[seq],
							 data_read,
							 0xff);
				addr += (EXT_ACCESS_BURST_LENGTH * 4);
				ret = (ret != MV_OK) ? ret : ret_tmp;
			}
		}
	}

	return ret;
}

#else /* EXCLUDE_SWITCH_DEBUG */

u32 rl_version = 1;
u32 vref = 0x4;
u32 start_xsb_offset = 0;
u8 cs_mask_reg[] = {
	0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	return MV_OK;
}

#endif /* EXCLUDE_SWITCH_DEBUG */