1
2
3
4
5
6
7#include <common.h>
8#include <i2c.h>
9#include <spl.h>
10#include <asm/io.h>
11#include <asm/arch/cpu.h>
12#include <asm/arch/soc.h>
13
14#include "ddr3_hw_training.h"
15#include "xor.h"
16#include "xor_regs.h"
17
/* Forward declaration -- defined near the bottom of this file */
static void ddr3_flush_l1_line(u32 line);

/* PBS (Per-Bit Skew) training patterns, defined elsewhere in the DDR3
 * training code: 16-bit-bus and 32-bit-bus variants, plus a 64-bit-bus
 * variant on MV88F78X60 only.
 */
extern u32 pbs_pattern[2][LEN_16BIT_PBS_PATTERN];
extern u32 pbs_pattern_32b[2][LEN_PBS_PATTERN];
#if defined(MV88F78X60)
extern u32 pbs_pattern_64b[2][LEN_PBS_PATTERN];
#endif
/* Maps (pup, dq) to the PHY register offset holding that DQ's PBS delay */
extern u32 pbs_dq_mapping[PUP_NUM_64BIT][DQ_NUM];

#if defined(MV88F78X60) || defined(MV88F672X)
/* PBS lock state shared with ddr3_sdram_pbs_compare():
 * pbs_locked_dq[pup][dq]    - 1 once this DQ's skew has been locked
 * pbs_locked_dm[pup]        - lock flag for each PUP's data-mask line
 * pbs_locked_value[pup][dq] - the PBS value the DQ was locked at
 */
u32 pbs_locked_dq[MAX_PUP_NUM][DQ_NUM] = { { 0 } };
u32 pbs_locked_dm[MAX_PUP_NUM] = { 0 };
u32 pbs_locked_value[MAX_PUP_NUM][DQ_NUM] = { { 0 } };

/* Per-bit mismatch map filled by compare_pattern_v1() under MV_DEBUG_DQS */
int per_bit_data[MAX_PUP_NUM][DQ_NUM];
#endif

/* Read-back buffer for all pattern compares; 32-byte aligned for DMA */
static u32 sdram_data[LEN_KILLER_PATTERN] __aligned(32) = { 0 };

/* Single XOR/DMA descriptor used by ddr3_dram_sram_burst() */
static struct crc_dma_desc dma_desc __aligned(32) = { 0 };

/* Maximum polling iterations while waiting on the XOR engine */
#define XOR_TIMEOUT 0x8000000

struct xor_channel_t {
	struct crc_dma_desc *desc;	/* virtual address of the descriptor */
	unsigned long desc_phys_addr;	/* address handed to the XOR engine */
};

/* Completion cause bits of one XOR channel; each channel owns a 16-bit
 * field in the cause register. NOTE(review): the exact meaning of bits
 * 0x1 vs 0x2 is not visible here -- confirm against xor_regs.h.
 */
#define XOR_CAUSE_DONE_MASK(chan) ((0x1 | 0x2) << (chan * 16))
48
49void xor_waiton_eng(int chan)
50{
51 int timeout;
52
53 timeout = 0;
54 while (!(reg_read(XOR_CAUSE_REG(XOR_UNIT(chan))) &
55 XOR_CAUSE_DONE_MASK(XOR_CHAN(chan)))) {
56 if (timeout > XOR_TIMEOUT)
57 goto timeout;
58
59 timeout++;
60 }
61
62 timeout = 0;
63 while (mv_xor_state_get(chan) != MV_IDLE) {
64 if (timeout > XOR_TIMEOUT)
65 goto timeout;
66
67 timeout++;
68 }
69
70
71 reg_write(XOR_CAUSE_REG(XOR_UNIT(chan)),
72 ~(XOR_CAUSE_DONE_MASK(XOR_CHAN(chan))));
73
74timeout:
75 return;
76}
77
78static int special_compare_pattern(u32 uj)
79{
80 if ((uj == 30) || (uj == 31) || (uj == 61) || (uj == 62) ||
81 (uj == 93) || (uj == 94) || (uj == 126) || (uj == 127))
82 return 1;
83
84 return 0;
85}
86
87
88
89
90
91
/*
 * Compare one read-back word (sdram_data[uj]) against the expected
 * pattern word and mark every mismatching byte lane (PUP) in *pup.
 * Under MV_DEBUG_DQS, additionally record which individual DQ bits
 * differed in per_bit_data[][].
 *
 * uj         - word index into sdram_data[] / pattern[]
 * pup        - in/out failing-PUP bitmask; 0xFF means all PUPs already
 *              failed, which short-circuits the compare entirely
 * pattern    - expected data
 * pup_groups - 2 when the bus has two 32-bit PUP groups (64-bit bus),
 *              1 otherwise; selects which group a failing byte maps to
 * debug_dqs  - enables the per-bit bookkeeping (MV_DEBUG_DQS builds only)
 */
static void compare_pattern_v1(u32 uj, u32 *pup, u32 *pattern,
			       u32 pup_groups, int debug_dqs)
{
	u32 val;
	u32 uk;
	u32 var1;
	u32 var2;
	__maybe_unused u32 dq;

	/* Skip if the whole word matches or every PUP is already marked */
	if (((sdram_data[uj]) != (pattern[uj])) && (*pup != 0xFF)) {
		for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
			/* Isolate byte lane uk from both words */
			val = CMP_BYTE_SHIFT * uk;
			var1 = ((sdram_data[uj] >> val) & CMP_BYTE_MASK);
			var2 = ((pattern[uj] >> val) & CMP_BYTE_MASK);

			if (var1 != var2) {
				/* Alternate words belong to alternate PUP
				 * groups when pup_groups == 2 */
				*pup |= (1 << (uk + (PUP_NUM_32BIT *
						     (uj % pup_groups))));

#ifdef MV_DEBUG_DQS
				if (!debug_dqs)
					continue;

				/* Record exactly which DQ bits mismatched
				 * for this PUP */
				for (dq = 0; dq < DQ_NUM; dq++) {
					val = uk + (PUP_NUM_32BIT *
						    (uj % pup_groups));
					if (((var1 >> dq) & 0x1) !=
					    ((var2 >> dq) & 0x1))
						per_bit_data[val][dq] = 1;
					else
						per_bit_data[val][dq] = 0;
				}
#endif
			}
		}
	}
}
129
130static void compare_pattern_v2(u32 uj, u32 *pup, u32 *pattern)
131{
132 u32 val;
133 u32 uk;
134 u32 var1;
135 u32 var2;
136
137 if (((sdram_data[uj]) != (pattern[uj])) && (*pup != 0x3)) {
138
139 for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
140 val = CMP_BYTE_SHIFT * uk;
141 var1 = (sdram_data[uj] >> val) & CMP_BYTE_MASK;
142 var2 = (pattern[uj] >> val) & CMP_BYTE_MASK;
143 if (var1 != var2)
144 *pup |= (1 << (uk % PUP_NUM_16BIT));
145 }
146 }
147}
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
/*
 * ddr3_sdram_compare - burst a pattern to DRAM (optionally), read it back
 * over the XOR DMA engine into sdram_data[], and accumulate failing PUPs
 * into *new_locked_pup. The compare routine is selected at compile time
 * per SoC (v1 for MV88F78X60/MV88F672X, v2 for MV88F67XX).
 *
 * dram_info        - DDR topology (bus width, number of PUPs)
 * unlock_pup       - unused here; kept for interface compatibility
 * new_locked_pup   - in/out bitmask of PUPs that failed the compare
 * pattern          - expected/source data
 * pattern_len      - length in 32-bit words
 * sdram_offset     - DRAM address under test
 * write            - 1 to first burst the pattern into DRAM
 * mask/mask_pattern - unused here; kept for interface compatibility
 * special_compare  - skip indices excluded by special_compare_pattern()
 *
 * Returns MV_OK.
 */
int ddr3_sdram_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
		       u32 *new_locked_pup, u32 *pattern,
		       u32 pattern_len, u32 sdram_offset, int write,
		       int mask, u32 *mask_pattern,
		       int special_compare)
{
	u32 uj;
	__maybe_unused u32 pup_groups;
	__maybe_unused u32 dq;

#if !defined(MV88F67XX)
	/* A 64-bit bus is handled as two 32-bit PUP groups */
	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;
#endif

	/* Drop any stale data from the PHY read FIFO before reading back */
	ddr3_reset_phy_read_fifo();

	/* Optionally DMA the pattern into DRAM first */
	if (write == 1)
		ddr3_dram_sram_burst((u32)pattern, sdram_offset, pattern_len);

	/* DMA the DRAM contents back into the compare buffer */
	ddr3_dram_sram_burst(sdram_offset, (u32)sdram_data, pattern_len);

	/* Word-by-word compare */
	for (uj = 0; uj < pattern_len; uj++) {
		if (special_compare && special_compare_pattern(uj))
			continue;

#if defined(MV88F78X60) || defined(MV88F672X)
		compare_pattern_v1(uj, new_locked_pup, pattern, pup_groups, 1);
#elif defined(MV88F67XX)
		compare_pattern_v2(uj, new_locked_pup, pattern);
#endif
	}

	return MV_OK;
}
203
204#if defined(MV88F78X60) || defined(MV88F672X)
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
/*
 * ddr3_sdram_dm_compare - data-mask-related compare used by PBS training.
 *
 * First bursts the PBS pattern out and back over DMA and collects PUPs
 * that fail on data alone into the local 'pup' mask. Then writes two
 * fixed words directly at SDRAM_PBS_TX_OFFS + 0x10/0x14 with plain CPU
 * stores, reads them back, and ORs any mismatching PUPs -- together with
 * the earlier data failures -- into *new_locked_pup.
 * NOTE(review): the direct 0x12345678 write/read presumably exercises the
 * DM (data mask) path; confirm against the training flow documentation.
 *
 * unlock_pup and sdram_offset are unused; kept for interface
 * compatibility. Returns MV_OK.
 */
int ddr3_sdram_dm_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			  u32 *new_locked_pup, u32 *pattern,
			  u32 sdram_offset)
{
	u32 uj, uk, var1, var2, pup_groups;
	u32 val;
	u32 pup = 0;

	/* A 64-bit bus is handled as two 32-bit PUP groups */
	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	/* Burst the PBS pattern out to DRAM and read it back over DMA */
	ddr3_dram_sram_burst((u32)pattern, SDRAM_PBS_TX_OFFS,
			     LEN_PBS_PATTERN);
	ddr3_dram_sram_burst(SDRAM_PBS_TX_OFFS, (u32)sdram_data,
			     LEN_PBS_PATTERN);

	/* Collect data-only failures into the local mask */
	for (uj = 0; uj < LEN_PBS_PATTERN; uj++)
		compare_pattern_v1(uj, &pup, pattern, pup_groups, 0);

	/* Direct CPU write/read of two words just past the PBS pattern */
	*(u32 *)(SDRAM_PBS_TX_OFFS + 0x10) = 0x12345678;
	*(u32 *)(SDRAM_PBS_TX_OFFS + 0x14) = 0x12345678;

	sdram_data[0] = *(u32 *)(SDRAM_PBS_TX_OFFS + 0x10);
	sdram_data[1] = *(u32 *)(SDRAM_PBS_TX_OFFS + 0x14);

	/* Compare the two read-back words against the expected pattern */
	for (uj = 0; uj < 2; uj++) {
		/* Skip if the word matches or all PUPs already failed */
		if (((sdram_data[uj]) != (pattern[uj])) &&
		    (*new_locked_pup != 0xFF)) {
			for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
				val = CMP_BYTE_SHIFT * uk;
				var1 = ((sdram_data[uj] >> val) & CMP_BYTE_MASK);
				var2 = ((pattern[uj] >> val) & CMP_BYTE_MASK);
				if (var1 != var2) {
					*new_locked_pup |= (1 << (uk +
						(PUP_NUM_32BIT * (uj % pup_groups))));
					*new_locked_pup |= pup;
				}
			}
		}
	}

	return MV_OK;
}
267
268
269
270
271
272
273
274
275
276
277
278
279
280
/*
 * ddr3_sdram_pbs_compare - one PBS (Per-Bit Skew) compare iteration.
 *
 * Bursts the PBS pattern for the current bus width into its dedicated
 * DRAM area (TX direction only), reads it back with plain CPU accesses,
 * and for every DQ bit that mismatches:
 *   - records pbs_curr_val in skew_array[pup * DQ_NUM + dq],
 *   - clears the PUP from unlock_pup_dq_array[dq],
 *   - programs pbs_lock_val into the PUP's PBS delay register and marks
 *     the (pup, dq) pair as locked in pbs_locked_dq/pbs_locked_value.
 *
 * dram_info           - DDR topology (bus width, number of PUPs)
 * pup_locked          - unused here; kept for interface compatibility
 * is_tx               - 1 = TX (write) direction, else RX
 * pbs_pattern_idx     - selects one of the two PBS patterns
 * pbs_curr_val        - PBS delay value currently under test
 * pbs_lock_val        - delay value programmed when a DQ locks
 * skew_array          - out: per-(pup, dq) skew at which failure was seen
 * unlock_pup_dq_array - in/out: per-DQ bitmask of still-unlocked PUPs
 * ecc                 - non-zero when testing the ECC PUP only
 *
 * Returns MV_OK, or MV_FAIL for an unsupported bus width.
 */
int ddr3_sdram_pbs_compare(MV_DRAM_INFO *dram_info, u32 pup_locked,
			   int is_tx, u32 pbs_pattern_idx,
			   u32 pbs_curr_val, u32 pbs_lock_val,
			   u32 *skew_array, u8 *unlock_pup_dq_array,
			   u32 ecc)
{
	/* Per-DQ mask of PUPs whose delay register must be (re)written */
	u32 pbs_write_pup[DQ_NUM] = { 0 };
	u32 update_pup;
	u32 max_pup;
	u32 pup_addr;
	u32 ui, dq, pup;
	int var1, var2;
	u32 sdram_offset, pup_groups, tmp_pup;
	u32 *pattern_ptr;
	u32 val;

	/* Select the PBS pattern matching the DDR bus width */
	switch (dram_info->ddr_width) {
#if defined(MV88F672X)
	case 16:
		pattern_ptr = (u32 *)&pbs_pattern[pbs_pattern_idx];
		break;
#endif
	case 32:
		pattern_ptr = (u32 *)&pbs_pattern_32b[pbs_pattern_idx];
		break;
#if defined(MV88F78X60)
	case 64:
		pattern_ptr = (u32 *)&pbs_pattern_64b[pbs_pattern_idx];
		break;
#endif
	default:
		return MV_FAIL;
	}

	max_pup = dram_info->num_of_std_pups;

	/* Each pattern index has its own slice of the PBS test area */
	sdram_offset = SDRAM_PBS_I_OFFS + pbs_pattern_idx * SDRAM_PBS_NEXT_OFFS;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	/* Drop any stale data from the PHY read FIFO */
	ddr3_reset_phy_read_fifo();

	/* In TX direction, burst the pattern into DRAM first */
	if (is_tx == 1) {
		ddr3_dram_sram_burst((u32)pattern_ptr, sdram_offset,
				     LEN_PBS_PATTERN);
	}

	/* Read back with plain CPU loads (not DMA) */
	ddr3_dram_sram_read(sdram_offset, (u32)sdram_data, LEN_PBS_PATTERN);

	/* Scan every word for mismatching byte lanes / DQ bits */
	for (ui = 0; ui < LEN_PBS_PATTERN; ui++) {
		if ((sdram_data[ui]) != (pattern_ptr[ui])) {

			for (pup = 0; pup < PUP_NUM_32BIT; pup++) {
				/* Isolate byte lane 'pup' from both words */
				val = CMP_BYTE_SHIFT * pup;
				var1 = ((sdram_data[ui] >> val) &
					CMP_BYTE_MASK);
				var2 = ((pattern_ptr[ui] >> val) &
					CMP_BYTE_MASK);

				if (var1 != var2) {
					/* Map the failing byte to a PUP
					 * index for this bus width */
					if (dram_info->ddr_width > 16) {
						tmp_pup = (pup + PUP_NUM_32BIT *
							   (ui % pup_groups));
					} else {
						tmp_pup = (pup % PUP_NUM_16BIT);
					}

					update_pup = (1 << tmp_pup);
					/* In ECC mode only PUP 0 matters */
					if (ecc && (update_pup != 0x1))
						continue;

					/* Check each DQ bit of the lane;
					 * in ECC mode account against the
					 * dedicated ECC PUP instead */
					for (dq = 0; dq < DQ_NUM; dq++) {
						val = tmp_pup * (1 - ecc) +
							ecc * ECC_PUP;
						if (((var1 >> dq) & 0x1) !=
						    ((var2 >> dq) & 0x1)) {
							/* Skip DQs locked at
							 * a different value */
							if (pbs_locked_dq[val][dq] == 1 &&
							    pbs_locked_value[val][dq] != pbs_curr_val)
								continue;

							/* Remember to program
							 * this PUP's delay */
							pbs_write_pup[dq] |=
								update_pup;

							/* DQ failed: remove
							 * the PUP from the
							 * unlocked set */
							unlock_pup_dq_array[dq] &=
								~update_pup;

							/* Record the skew at
							 * which the failure
							 * was observed */
							skew_array[tmp_pup * DQ_NUM + dq] =
								pbs_curr_val;
						}
					}
				}
			}
		}
	}

	/* TX and RX use different PBS delay register banks */
	pup_addr = (is_tx == 1) ? PUP_PBS_TX : PUP_PBS_RX;

	/* Program and lock every (pup, dq) flagged above */
	for (dq = 0; dq < DQ_NUM; dq++) {
		for (pup = 0; pup < max_pup; pup++) {
			if (pbs_write_pup[dq] & (1 << pup)) {
				val = pup * (1 - ecc) + ecc * ECC_PUP;
				/* Skip DQs locked at a different value */
				if (pbs_locked_dq[val][dq] == 1 &&
				    pbs_locked_value[val][dq] != pbs_curr_val)
					continue;

				/* Lock the DQ at the current PBS value */
				pbs_locked_dq[val][dq] = 1;
				pbs_locked_value[val][dq] = pbs_curr_val;
				ddr3_write_pup_reg(pup_addr +
						   pbs_dq_mapping[val][dq],
						   CS0, val, 0, pbs_lock_val);
			}
		}
	}

	return MV_OK;
}
425#endif
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442int ddr3_sdram_direct_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
443 u32 *new_locked_pup, u32 *pattern,
444 u32 pattern_len, u32 sdram_offset,
445 int write, int mask, u32 *mask_pattern)
446{
447 u32 uj, uk, pup_groups;
448 u32 *sdram_addr;
449
450 sdram_addr = (u32 *)sdram_offset;
451
452 if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
453 pup_groups = 2;
454 else
455 pup_groups = 1;
456
457
458 if (write == 1) {
459 for (uk = 0; uk < pattern_len; uk++) {
460 *sdram_addr = pattern[uk];
461 sdram_addr++;
462 }
463 }
464
465 sdram_addr = (u32 *)sdram_offset;
466
467 for (uk = 0; uk < pattern_len; uk++) {
468 sdram_data[uk] = *sdram_addr;
469 sdram_addr++;
470 }
471
472
473 for (uj = 0; uj < pattern_len; uj++) {
474 if (dram_info->ddr_width > 16) {
475 compare_pattern_v1(uj, new_locked_pup, pattern,
476 pup_groups, 0);
477 } else {
478 compare_pattern_v2(uj, new_locked_pup, pattern);
479 }
480 }
481
482 return MV_OK;
483}
484
485
486
487
488
489
490
491
492
493int ddr3_dram_sram_burst(u32 src, u32 dst, u32 len)
494{
495 u32 chan, byte_count, cs_num, byte;
496 struct xor_channel_t channel;
497
498 chan = 0;
499 byte_count = len * 4;
500
501
502 while (mv_xor_state_get(chan) != MV_IDLE)
503 ;
504
505
506 channel.desc = &dma_desc;
507
508
509 if (src < SRAM_BASE) {
510
511 cs_num = (src / (1 + SDRAM_CS_SIZE));
512 reg_write(XOR_ADDR_OVRD_REG(0, 0),
513 ((cs_num << 1) | (1 << 0)));
514 channel.desc->src_addr0 = (src % (1 + SDRAM_CS_SIZE));
515 channel.desc->dst_addr = dst;
516 } else {
517
518 cs_num = (dst / (1 + SDRAM_CS_SIZE));
519 reg_write(XOR_ADDR_OVRD_REG(0, 0),
520 ((cs_num << 25) | (1 << 24)));
521 channel.desc->src_addr0 = (src);
522 channel.desc->dst_addr = (dst % (1 + SDRAM_CS_SIZE));
523 channel.desc->src_addr0 = src;
524 channel.desc->dst_addr = (dst % (1 + SDRAM_CS_SIZE));
525 }
526
527 channel.desc->src_addr1 = 0;
528 channel.desc->byte_cnt = byte_count;
529 channel.desc->next_desc_ptr = 0;
530 channel.desc->status = 1 << 31;
531 channel.desc->desc_cmd = 0x0;
532 channel.desc_phys_addr = (unsigned long)&dma_desc;
533
534 ddr3_flush_l1_line((u32)&dma_desc);
535
536
537 if (mv_xor_transfer(chan, MV_DMA, channel.desc_phys_addr) != MV_OK)
538 return MV_FAIL;
539
540
541 xor_waiton_eng(chan);
542
543 if (dst > SRAM_BASE) {
544 for (byte = 0; byte < byte_count; byte += 0x20)
545 cache_inv(dst + byte);
546 }
547
548 return MV_OK;
549}
550
551
552
553
554
555
556
557
/*
 * Flush the two L1 cache lines covering 'line' (used on the DMA
 * descriptor before starting an XOR transfer). The CPU architecture
 * generation (v6 vs v7) is read from the sample-at-reset register,
 * except on MV88F672X which always takes the v7 path.
 */
static void ddr3_flush_l1_line(u32 line)
{
	u32 reg;

#if defined(MV88F672X)
	/* This SoC always uses the v7 flush routine */
	reg = 1;
#else
	reg = reg_read(REG_SAMPLE_RESET_LOW_ADDR) &
		(1 << REG_SAMPLE_RESET_CPU_ARCH_OFFS);
#ifdef MV88F67XX
	/* On MV88F67XX the architecture bit is inverted */
	reg = ~reg & (1 << REG_SAMPLE_RESET_CPU_ARCH_OFFS);
#endif
#endif

	if (reg) {
		/* v7 CPU: flush both lines that may hold the data */
		flush_l1_v7(line);
		flush_l1_v7(line + CACHE_LINE_SIZE);
	} else {
		/* v6 CPU */
		flush_l1_v6(line);
		flush_l1_v6(line + CACHE_LINE_SIZE);
	}
}
582
583int ddr3_dram_sram_read(u32 src, u32 dst, u32 len)
584{
585 u32 ui;
586 u32 *dst_ptr, *src_ptr;
587
588 dst_ptr = (u32 *)dst;
589 src_ptr = (u32 *)src;
590
591 for (ui = 0; ui < len; ui++) {
592 *dst_ptr = *src_ptr;
593 dst_ptr++;
594 src_ptr++;
595 }
596
597 return MV_OK;
598}
599
600int ddr3_sdram_dqs_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
601 u32 *new_locked_pup, u32 *pattern,
602 u32 pattern_len, u32 sdram_offset, int write,
603 int mask, u32 *mask_pattern,
604 int special_compare)
605{
606 u32 uj, pup_groups;
607
608 if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
609 pup_groups = 2;
610 else
611 pup_groups = 1;
612
613 ddr3_reset_phy_read_fifo();
614
615
616 if (write == 1)
617 ddr3_dram_sram_burst((u32)pattern, sdram_offset, pattern_len);
618
619 ddr3_dram_sram_burst(sdram_offset, (u32)sdram_data, pattern_len);
620
621
622 for (uj = 0; uj < pattern_len; uj++) {
623 if (special_compare && special_compare_pattern(uj))
624 continue;
625
626 if (dram_info->ddr_width > 16) {
627 compare_pattern_v1(uj, new_locked_pup, pattern,
628 pup_groups, 1);
629 } else {
630 compare_pattern_v2(uj, new_locked_pup, pattern);
631 }
632 }
633
634 return MV_OK;
635}
636
/*
 * Reset the DDR PHY read FIFO: raise the read-leveling flag, set the
 * FIFO-reset bit together with the software-override bit, busy-wait for
 * the hardware to self-clear the reset bit, then drop the read-leveling
 * flag again.
 */
void ddr3_reset_phy_read_fifo(void)
{
	u32 reg;

	/* Raise the read-leveling flag */
	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	reg |= (1 << REG_DRAM_TRAINING_RL_OFFS);


	reg_write(REG_DRAM_TRAINING_ADDR, reg);

	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	/* '+' acts as bitwise OR here since the two bit positions are
	 * distinct -- NOTE(review): '|' would be the clearer idiom */
	reg |= ((1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS) +
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS));


	/* Trigger the FIFO reset with software override */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	/* Busy-wait until the hardware self-clears the FIFO-reset bit */
	do {
		reg = reg_read(REG_DRAM_TRAINING_2_ADDR) &
			(1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS);
	} while (reg);

	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	/* Drop the read-leveling flag again */
	reg &= ~(1 << REG_DRAM_TRAINING_RL_OFFS);


	reg_write(REG_DRAM_TRAINING_ADDR, reg);
}
670