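/*
 * DDR3 training - per-bit skew (PBS) engine for the Marvell DDR3 training
 * IP: an ADLL shift search followed by a per-bit DQ skew search, run
 * separately for the Rx (read) and Tx (write) paths.
 */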
#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#define TYPICAL_PBS_VALUE	12

u32 nominal_adll[MAX_INTERFACE_NUM * MAX_BUS_NUM];
enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
u8 result_mat[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
u8 result_mat_rx_dqs[MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];

u8 result_all_bit[MAX_BUS_NUM * BUS_WIDTH_IN_BITS * MAX_INTERFACE_NUM];
u8 max_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 min_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 max_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 min_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 pbsdelay_per_pup[NUM_OF_PBS_MODES][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 adll_shift_lock[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 adll_shift_val[MAX_INTERFACE_NUM][MAX_BUS_NUM];
enum hws_pattern pbs_pattern = PATTERN_VREF;
static u8 pup_state[MAX_INTERFACE_NUM][MAX_BUS_NUM];

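/*
 * ddr3_tip_pbs() - run the per-bit skew flow for one direction.
 *
 * Rough flow (stage names follow the debug traces):
 *  1. Save the CS enable and nominal centralization ADLL values.
 *  2. EBA: per-bit ADLL shift search; pick an ADLL shift per pup.
 *  3. EEBA: retry pups whose result sat at the edge of the search range.
 *  4. PBS: per-bit DQ skew search with the chosen ADLL shift applied.
 *  5. SBA: fallback with reversed search direction for pups that did not lock.
 *  6. Write the per-bit PBS values to the PHY, estimate the PBS tap size,
 *     and restore the saved registers.
 *
 * Returns MV_OK on success, MV_FAIL if no valid window was found.
 */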
int ddr3_tip_pbs(u32 dev_num, enum pbs_dir pbs_mode)
{
	u32 res0[MAX_INTERFACE_NUM];
	int adll_tap = MEGA / freq_val[medium_freq] / 64;
	int pad_num = 0;
	enum hws_search_dir search_dir =
		(pbs_mode == PBS_RX_MODE) ? HWS_HIGH2LOW : HWS_LOW2HIGH;
	enum hws_dir dir = (pbs_mode == PBS_RX_MODE) ? OPER_READ : OPER_WRITE;
	int iterations = (pbs_mode == PBS_RX_MODE) ? 31 : 63;
	u32 res_valid_mask = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
	int init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
	enum hws_edge_compare search_edge = EDGE_FP;
	u32 pup = 0, bit = 0, if_id = 0, all_lock = 0, cs_num = 0;
	int reg_addr = 0;
	u32 validation_val = 0;
	u32 cs_enable_reg_val[MAX_INTERFACE_NUM];
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u8 temp = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

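	/* Save each interface's CS enable register, then set the single-CS bit */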
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, cs_enable_reg_val, MASK_ALL_BITS));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, (1 << 3), (1 << 3)));
	}

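	/* Read the nominal centralization ADLL values so they can be restored later */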
	reg_addr = (pbs_mode == PBS_RX_MODE) ?
		(READ_CENTRALIZATION_PHY_REG +
		 (effective_cs * CS_REGISTER_ADDR_OFFSET)) :
		(WRITE_CENTRALIZATION_PHY_REG +
		 (effective_cs * CS_REGISTER_ADDR_OFFSET));
	read_adll_value(nominal_adll, reg_addr, MASK_ALL_BITS);

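	/*
	 * Stage 1 - ADLL shift search (the debug traces call this EBA): run a
	 * per-bit search on the ADLL control element and derive an ADLL shift
	 * value for every pup.
	 */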
	ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
			     PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
			     PARAM_NOT_CARE, RESULT_PER_BIT,
			     HWS_CONTROL_ELEMENT_ADLL, search_dir, dir,
			     tm->if_act_mask, init_val, iterations,
			     pbs_pattern, search_edge, CS_SINGLE, cs_num,
			     train_status);

	validation_val = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0;
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			min_adll_per_pup[if_id][pup] =
				(pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
			pup_state[if_id][pup] = 0x3;
			adll_shift_lock[if_id][pup] = 1;
			max_adll_per_pup[if_id][pup] = 0x0;
		}
	}

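	/*
	 * Collect the per-bit EBA results and classify each pup: state 1 if
	 * the training IP reported a failure, state 4 if the result sits at
	 * the edge of the search range (retried in the EEBA stage below),
	 * otherwise update the min/max ADLL window and the ADLL shift value.
	 */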
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE,
				      mask_results_dq_reg_map[
					      bit + pup * BUS_WIDTH_IN_BITS],
				      res0, MASK_ALL_BITS));
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
						 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup,
						  res0[if_id]));
				if (pup_state[if_id][pup] != 3)
					continue;

				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- Fail Training IP\n"));
					pup_state[if_id][pup] = 1;
					adll_shift_lock[if_id][pup] = 0;
					continue;
				} else if ((res0[if_id] & res_valid_mask) ==
					   validation_val) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- FAIL EBA %d %d %d %d\n",
							  if_id, bit, pup,
							  res0[if_id]));
					pup_state[if_id][pup] = 4;
					adll_shift_lock[if_id][pup] = 0;
					continue;
				} else {
					res0[if_id] =
						(pbs_mode == PBS_RX_MODE) ?
						((res0[if_id] &
						  res_valid_mask) + 1) :
						((res0[if_id] &
						  res_valid_mask) - 1);
					max_adll_per_pup[if_id][pup] =
						(max_adll_per_pup[if_id][pup] <
						 res0[if_id]) ?
						(u8)res0[if_id] :
						max_adll_per_pup[if_id][pup];
					min_adll_per_pup[if_id][pup] =
						(res0[if_id] >
						 min_adll_per_pup[if_id][pup]) ?
						min_adll_per_pup[if_id][pup] :
						(u8)res0[if_id];
					adll_shift_val[if_id][pup] =
						(pbs_mode == PBS_RX_MODE) ?
						max_adll_per_pup[if_id][pup] :
						min_adll_per_pup[if_id][pup];
				}
			}
		}
	}

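	/*
	 * Stage 2 - EEBA: pups left in state 4 are retried after 0x1f is
	 * written to PHY registers 0x54/0x55 (Rx) or 0x14/0x15 (Tx), then the
	 * ADLL shift search is repeated for them.
	 */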
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			if (pup_state[if_id][pup] != 4)
				continue;

			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				(0x54 + effective_cs * 0x10) :
				(0x14 + effective_cs * 0x10);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, 0x1f));
			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				(0x55 + effective_cs * 0x10) :
				(0x15 + effective_cs * 0x10);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, 0x1f));

			adll_shift_val[if_id][pup] = 0;
			min_adll_per_pup[if_id][pup] =
				(pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
			max_adll_per_pup[if_id][pup] = 0x0;

			ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE,
					     ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE, RESULT_PER_BIT,
					     HWS_CONTROL_ELEMENT_ADLL,
					     search_dir, dir,
					     tm->if_act_mask, init_val,
					     iterations, pbs_pattern,
					     search_edge, CS_SINGLE, cs_num,
					     train_status);
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
					 ("ADLL shift results:\n"));

			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE,
					      mask_results_dq_reg_map[
						      bit + pup *
						      BUS_WIDTH_IN_BITS],
					      res0, MASK_ALL_BITS));
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
						 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup,
						  res0[if_id]));

				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 (" -- EEBA Fail\n"));
					bit = BUS_WIDTH_IN_BITS;
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- EEBA Fail Training IP\n"));
					pup_state[if_id][pup] = 2;
					adll_shift_lock[if_id][pup] = 0;
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x54 + effective_cs * 0x10) :
						(0x14 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr,
						      0x0));
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x55 + effective_cs * 0x10) :
						(0x15 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr,
						      0x0));
					continue;
				} else if ((res0[if_id] & res_valid_mask) ==
					   validation_val) {
					bit = BUS_WIDTH_IN_BITS;
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- FAIL EEBA\n"));
					pup_state[if_id][pup] = 2;
					adll_shift_lock[if_id][pup] = 0;
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x54 + effective_cs * 0x10) :
						(0x14 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr,
						      0x0));
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x55 + effective_cs * 0x10) :
						(0x15 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr,
						      0x0));
					continue;
				} else {
					adll_shift_lock[if_id][pup] = 1;
					res0[if_id] =
						(pbs_mode == PBS_RX_MODE) ?
						((res0[if_id] &
						  res_valid_mask) + 1) :
						((res0[if_id] &
						  res_valid_mask) - 1);
					max_adll_per_pup[if_id][pup] =
						(max_adll_per_pup[if_id][pup] <
						 res0[if_id]) ?
						(u8)res0[if_id] :
						max_adll_per_pup[if_id][pup];
					min_adll_per_pup[if_id][pup] =
						(res0[if_id] >
						 min_adll_per_pup[if_id][pup]) ?
						min_adll_per_pup[if_id][pup] :
						(u8)res0[if_id];
					adll_shift_val[if_id][pup] =
						(pbs_mode == PBS_RX_MODE) ?
						max_adll_per_pup[if_id][pup] :
						min_adll_per_pup[if_id][pup];
				}
			}
		}
	}

	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
					 ("FP I/F %d, ADLL Shift for EBA: pup[%d] Lock status = %d Lock Val = %d,%d\n",
					  if_id, pup,
					  adll_shift_lock[if_id][pup],
					  max_adll_per_pup[if_id][pup],
					  min_adll_per_pup[if_id][pup]));
		}
	}
	DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
			 ("Update ADLL Shift of all pups:\n"));

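	/* Program the chosen ADLL shift value for every pup that locked */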
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			if (adll_shift_lock[if_id][pup] != 1)
				continue;

			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				(0x3 + effective_cs * 4) :
				(0x1 + effective_cs * 4);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, adll_shift_val[if_id][pup]));
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
					 ("FP I/F %d, Pup[%d] = %d\n", if_id,
					  pup, adll_shift_val[if_id][pup]));
		}
	}

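	/*
	 * Stage 3 - per-bit skew search: clear the per-bit bookkeeping, then
	 * search the DQ skew control element from 0 upwards with the ADLL
	 * shift applied.
	 */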
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			max_pbs_per_pup[if_id][pup] = 0x0;
			min_pbs_per_pup[if_id][pup] = 0x1f;
			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
					       if_id * MAX_BUS_NUM *
					       BUS_WIDTH_IN_BITS] = 0;
			}
		}
	}

	iterations = 31;
	search_dir = HWS_LOW2HIGH;
	init_val = 0;

	ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			     ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			     RESULT_PER_BIT, HWS_CONTROL_ELEMENT_DQ_SKEW,
			     search_dir, dir, tm->if_act_mask, init_val,
			     iterations, pbs_pattern, search_edge,
			     CS_SINGLE, cs_num, train_status);

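	/* Record the per-bit skew results and track the min/max PBS per pup */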
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			if (adll_shift_lock[if_id][pup] != 1)
				continue;

			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE,
					      mask_results_dq_reg_map[
						      bit +
						      pup * BUS_WIDTH_IN_BITS],
					      res0, MASK_ALL_BITS));
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("Per Bit Skew search, FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup,
						  res0[if_id]));
				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
							 ("--EBA PBS Fail - Training IP machine\n"));
					bit = BUS_WIDTH_IN_BITS;
					adll_shift_lock[if_id][pup] = 0;
					pup_state[if_id][pup] = 2;
					max_pbs_per_pup[if_id][pup] = 0x0;
					min_pbs_per_pup[if_id][pup] = 0x1f;
					continue;
				} else {
					temp = (u8)(res0[if_id] &
						    res_valid_mask);
					max_pbs_per_pup[if_id][pup] =
						(temp >
						 max_pbs_per_pup[if_id][pup]) ?
						temp :
						max_pbs_per_pup[if_id][pup];
					min_pbs_per_pup[if_id][pup] =
						(temp <
						 min_pbs_per_pup[if_id][pup]) ?
						temp :
						min_pbs_per_pup[if_id][pup];
					result_all_bit[bit +
						       pup * BUS_WIDTH_IN_BITS +
						       if_id * MAX_BUS_NUM *
						       BUS_WIDTH_IN_BITS] =
						temp;
				}
			}
		}
	}

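	/* Lock check: all_lock stays 1 only if every active pup locked */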
	all_lock = 1;
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			all_lock = all_lock * adll_shift_lock[if_id][pup];
		}
	}

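	/*
	 * Stage 4 - SBA fallback: pups that did not lock are retried with the
	 * search direction reversed and their PBS/DQS registers cleared,
	 * followed by another per-bit skew search for those pups.
	 */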
	if (all_lock == 0) {
		DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
				 ("##########ADLL shift for SBA###########\n"));

		search_dir = (pbs_mode == PBS_RX_MODE) ? HWS_LOW2HIGH :
			HWS_HIGH2LOW;
		init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
		for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup);
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				if (adll_shift_lock[if_id][pup] == 1)
					continue;

				adll_shift_lock[if_id][pup] = 0;
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x54 + effective_cs * 0x10) :
					(0x14 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, 0));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x55 + effective_cs * 0x10) :
					(0x15 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, 0));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x5f + effective_cs * 0x10) :
					(0x1f + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, 0));

				adll_shift_val[if_id][pup] = 0;
				min_adll_per_pup[if_id][pup] = 0x1f;
				max_adll_per_pup[if_id][pup] = 0x0;

				ddr3_tip_ip_training(dev_num,
						     ACCESS_TYPE_MULTICAST,
						     PARAM_NOT_CARE,
						     ACCESS_TYPE_MULTICAST,
						     PARAM_NOT_CARE,
						     RESULT_PER_BIT,
						     HWS_CONTROL_ELEMENT_ADLL,
						     search_dir, dir,
						     tm->if_act_mask,
						     init_val, iterations,
						     pbs_pattern,
						     search_edge, CS_SINGLE,
						     cs_num, train_status);

				for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_MULTICAST,
						      PARAM_NOT_CARE,
						      mask_results_dq_reg_map
						      [bit +
						       pup *
						       BUS_WIDTH_IN_BITS],
						      res0, MASK_ALL_BITS));
					DEBUG_PBS_ENGINE(
						DEBUG_LEVEL_INFO,
						("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						 if_id, bit, pup, res0[if_id]));
					if ((res0[if_id] & 0x2000000) == 0) {
						bit = BUS_WIDTH_IN_BITS;
						pup_state[if_id][pup] = 1;
						DEBUG_PBS_ENGINE(
							DEBUG_LEVEL_INFO,
							(" SBA Fail\n"));
						continue;
					} else {
						adll_shift_lock[if_id][pup]++;
						res0[if_id] =
							(pbs_mode == PBS_RX_MODE) ?
							((res0[if_id] & res_valid_mask) + 1) :
							((res0[if_id] & res_valid_mask) - 1);
						max_adll_per_pup[if_id][pup] =
							(max_adll_per_pup[if_id][pup] <
							 res0[if_id]) ?
							(u8)res0[if_id] :
							max_adll_per_pup[if_id][pup];
						min_adll_per_pup[if_id][pup] =
							(res0[if_id] >
							 min_adll_per_pup[if_id][pup]) ?
							min_adll_per_pup[if_id][pup] :
							(u8)res0[if_id];
						adll_shift_val[if_id][pup] =
							(pbs_mode == PBS_RX_MODE) ?
							max_adll_per_pup[if_id][pup] :
							min_adll_per_pup[if_id][pup];
					}
				}

				adll_shift_lock[if_id][pup] =
					(adll_shift_lock[if_id][pup] == 8) ?
					1 : 0;
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x3 + effective_cs * 4) :
					(0x1 + effective_cs * 4);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      adll_shift_val[if_id][pup]));
				DEBUG_PBS_ENGINE(
					DEBUG_LEVEL_INFO,
					("adll_shift_lock[%x][%x] = %x\n",
					 if_id, pup,
					 adll_shift_lock[if_id][pup]));
			}
		}

		search_edge = (pbs_mode == PBS_RX_MODE) ? EDGE_PF : EDGE_FP;
		search_dir = (pbs_mode == PBS_RX_MODE) ?
			HWS_LOW2HIGH : HWS_HIGH2LOW;
		iterations = 0x1f;
		init_val = (pbs_mode == PBS_RX_MODE) ? 0 : iterations;

		ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
				     PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
				     PARAM_NOT_CARE, RESULT_PER_BIT,
				     HWS_CONTROL_ELEMENT_DQ_SKEW,
				     search_dir, dir, tm->if_act_mask,
				     init_val, iterations, pbs_pattern,
				     search_edge, CS_SINGLE, cs_num,
				     train_status);

		for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup);
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_MULTICAST,
						      PARAM_NOT_CARE,
						      mask_results_dq_reg_map
						      [bit +
						       pup *
						       BUS_WIDTH_IN_BITS],
						      res0, MASK_ALL_BITS));
					if (pup_state[if_id][pup] != 2) {
						bit = BUS_WIDTH_IN_BITS;
						continue;
					}
					DEBUG_PBS_ENGINE(
						DEBUG_LEVEL_INFO,
						("Per Bit Skew search, PF I/F %d, bit:%d, pup:%d res0 0x%x\n",
						 if_id, bit, pup, res0[if_id]));
					if ((res0[if_id] & 0x2000000) == 0) {
						DEBUG_PBS_ENGINE(
							DEBUG_LEVEL_INFO,
							("SBA Fail\n"));
						max_pbs_per_pup[if_id][pup] =
							0x1f;
						result_all_bit[
							bit + pup *
							BUS_WIDTH_IN_BITS +
							if_id * MAX_BUS_NUM *
							BUS_WIDTH_IN_BITS] =
							0x1f;
					} else {
						temp = (u8)(res0[if_id] &
							    res_valid_mask);
						max_pbs_per_pup[if_id][pup] =
							(temp >
							 max_pbs_per_pup[if_id][pup]) ?
							temp :
							max_pbs_per_pup[if_id][pup];
						min_pbs_per_pup[if_id][pup] =
							(temp <
							 min_pbs_per_pup[if_id][pup]) ?
							temp :
							min_pbs_per_pup[if_id][pup];
						result_all_bit[
							bit + pup *
							BUS_WIDTH_IN_BITS +
							if_id * MAX_BUS_NUM *
							BUS_WIDTH_IN_BITS] =
							temp;
						adll_shift_lock[if_id][pup] = 1;
					}
				}
			}
		}

		/* Re-check pup lock state */
		all_lock = 1;
		for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
			/* loop body left empty in the original flow */
		}
	}

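	/*
	 * Convert the absolute per-bit results into PBS values relative to
	 * each pup's minimum and record per-interface pass/fail.
	 */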
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				if (adll_shift_lock[if_id][pup] != 1) {
					DEBUG_PBS_ENGINE(
						DEBUG_LEVEL_ERROR,
						("PBS failed for IF #%d\n",
						 if_id));
					training_result[training_stage][if_id]
						= TEST_FAILED;
					result_mat[if_id][pup][bit] = 0;
					max_pbs_per_pup[if_id][pup] = 0;
					min_pbs_per_pup[if_id][pup] = 0;
				} else {
					training_result[
						training_stage][if_id] =
						(training_result[training_stage]
						 [if_id] == TEST_FAILED) ?
						TEST_FAILED : TEST_SUCCESS;
					result_mat[if_id][pup][bit] =
						result_all_bit[
							bit + pup *
							BUS_WIDTH_IN_BITS +
							if_id * MAX_BUS_NUM *
							BUS_WIDTH_IN_BITS] -
						min_pbs_per_pup[if_id][pup];
				}
				DEBUG_PBS_ENGINE(
					DEBUG_LEVEL_INFO,
					("The abs min_pbs[%d][%d] = %d\n",
					 if_id, pup,
					 min_pbs_per_pup[if_id][pup]));
			}
		}
	}

	ddr3_tip_clean_pbs_result(dev_num, pbs_mode);

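	/*
	 * Write the per-bit PBS values to the PHY (pad order taken from
	 * dq_map_table) and estimate the PBS tap size in psec from the
	 * measured ADLL and PBS windows.
	 */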
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup);
			DEBUG_PBS_ENGINE(
				DEBUG_LEVEL_INFO,
				("Final Results: if_id %d, pup %d, Pup State: %d\n",
				 if_id, pup, pup_state[if_id][pup]));
			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				if (dq_map_table == NULL) {
					DEBUG_PBS_ENGINE(
						DEBUG_LEVEL_ERROR,
						("dq_map_table not initialized\n"));
					return MV_FAIL;
				}
				pad_num = dq_map_table[
					bit + pup * BUS_WIDTH_IN_BITS +
					if_id * BUS_WIDTH_IN_BITS *
					tm->num_of_bus_per_interface];
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("result_mat: %d ",
						  result_mat[if_id][pup]
						  [bit]));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(PBS_RX_PHY_REG + effective_cs * 0x10) :
					(PBS_TX_PHY_REG + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr + pad_num,
					      result_mat[if_id][pup][bit]));
			}
			pbsdelay_per_pup[pbs_mode][if_id][pup] =
				(max_pbs_per_pup[if_id][pup] ==
				 min_pbs_per_pup[if_id][pup]) ?
				TYPICAL_PBS_VALUE :
				((max_adll_per_pup[if_id][pup] -
				  min_adll_per_pup[if_id][pup]) * adll_tap /
				 (max_pbs_per_pup[if_id][pup] -
				  min_pbs_per_pup[if_id][pup]));

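			/*
			 * Tx mode: write half of the measured PBS window to
			 * registers 0x14/0x15 and re-apply the value saved by
			 * the earlier Rx pass to 0x54/0x55. Rx mode: only
			 * record the half-window for the later Tx pass.
			 */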
			if (pbs_mode == PBS_TX_MODE) {
				reg_addr = (0x14 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      (max_pbs_per_pup[if_id][pup] -
					       min_pbs_per_pup[if_id][pup]) /
					      2));
				reg_addr = (0x15 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      (max_pbs_per_pup[if_id][pup] -
					       min_pbs_per_pup[if_id][pup]) /
					      2));
				reg_addr = (0x54 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      result_mat_rx_dqs[if_id][pup]
					      [effective_cs]));
				reg_addr = (0x55 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      result_mat_rx_dqs[if_id][pup]
					      [effective_cs]));
			} else {
				result_mat_rx_dqs[if_id][pup][effective_cs] =
					(max_pbs_per_pup[if_id][pup] -
					 min_pbs_per_pup[if_id][pup]) / 2;
			}
			DEBUG_PBS_ENGINE(
				DEBUG_LEVEL_INFO,
				(", PBS tap=%d [psec] ==> skew observed = %d\n",
				 pbsdelay_per_pup[pbs_mode][if_id][pup],
				 ((max_pbs_per_pup[if_id][pup] -
				   min_pbs_per_pup[if_id][pup]) *
				  pbsdelay_per_pup[pbs_mode][if_id][pup])));
		}
	}

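	/*
	 * Restore the nominal centralization ADLL values and the CS enable
	 * register contents saved at the start of the function.
	 */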
	reg_addr = (pbs_mode == PBS_RX_MODE) ?
		(READ_CENTRALIZATION_PHY_REG + effective_cs * 4) :
		(WRITE_CENTRALIZATION_PHY_REG + effective_cs * 4);
	write_adll_value(nominal_adll, reg_addr);

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		reg_addr = (pbs_mode == PBS_RX_MODE) ?
			(0x5a + effective_cs * 0x10) :
			(0x1a + effective_cs * 0x10);
		CHECK_STATUS(ddr3_tip_bus_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr,
			      0));

		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
	}

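	/*
	 * Write the ODPG write/read mode enable register (presumably leaving
	 * the training test mode) and fail the stage if any pup was left in
	 * state 1, i.e. no valid window was found.
	 */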
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_WRITE_READ_MODE_ENABLE_REG, 0xffff, MASK_ALL_BITS));
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		if (pup_state[if_id][pup] == 1)
			return MV_FAIL;
	}

	return MV_OK;
}

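/*
 * Run the per-bit skew (PBS) flow for the receive (read) path.
 */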
int ddr3_tip_pbs_rx(u32 uidev_num)
{
	return ddr3_tip_pbs(uidev_num, PBS_RX_MODE);
}

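/*
 * Run the per-bit skew (PBS) flow for the transmit (write) path.
 */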
int ddr3_tip_pbs_tx(u32 uidev_num)
{
	return ddr3_tip_pbs(uidev_num, PBS_TX_MODE);
}

#ifndef EXCLUDE_SWITCH_DEBUG
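/*
 * Print the Rx and Tx PBS results for every chip select (debug helper).
 */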
int ddr3_tip_print_all_pbs_result(u32 dev_num)
{
	u32 curr_cs;
	u32 max_cs = hws_ddr3_tip_max_cs_get();

	for (curr_cs = 0; curr_cs < max_cs; curr_cs++) {
		ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_RX_MODE);
		ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_TX_MODE);
	}

	return MV_OK;
}

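/*
 * Print the PBS result registers for a single chip select and direction.
 */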
int ddr3_tip_print_pbs_result(u32 dev_num, u32 cs_num, enum pbs_dir pbs_mode)
{
	u32 data_value = 0, bit = 0, if_id = 0, pup = 0;
	u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
		(PBS_RX_PHY_REG + cs_num * 0x10) :
		(PBS_TX_PHY_REG + cs_num * 0x10);
	struct hws_topology_map *tm = ddr3_get_topology_map();

	printf("CS%d, %s ,PBS\n", cs_num,
	       (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");

	for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
		printf("%s, DQ", (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			printf("%d ,PBS,,, ", bit);
			for (pup = 0; pup <= tm->num_of_bus_per_interface;
			     pup++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr + bit,
					      &data_value));
				printf("%d , ", data_value);
			}
		}
		printf("\n");
	}
	printf("\n");

	return MV_OK;
}
#endif

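/*
 * Clear the PBS result registers of the current chip select for the given
 * direction.
 */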
int ddr3_tip_clean_pbs_result(u32 dev_num, enum pbs_dir pbs_mode)
{
	u32 if_id, pup, bit;
	u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
		(PBS_RX_PHY_REG + effective_cs * 0x10) :
		(PBS_TX_PHY_REG + effective_cs * 0x10);
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup <= tm->num_of_bus_per_interface; pup++) {
			for (bit = 0; bit <= BUS_WIDTH_IN_BITS + 3; bit++) {
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr + bit, 0));
			}
		}
	}

	return MV_OK;
}