/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */
#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_hw_training.h"
#include "xor.h"
#include "xor_regs.h"

static void ddr3_flush_l1_line(u32 line);

extern u32 pbs_pattern[2][LEN_16BIT_PBS_PATTERN];
extern u32 pbs_pattern_32b[2][LEN_PBS_PATTERN];
#if defined(MV88F78X60)
extern u32 pbs_pattern_64b[2][LEN_PBS_PATTERN];
#endif
extern u32 pbs_dq_mapping[PUP_NUM_64BIT + 1][DQ_NUM];

#if defined(MV88F78X60) || defined(MV88F672X)

u32 pbs_locked_dq[MAX_PUP_NUM][DQ_NUM] = { { 0 } };
u32 pbs_locked_dm[MAX_PUP_NUM] = { 0 };
u32 pbs_locked_value[MAX_PUP_NUM][DQ_NUM] = { { 0 } };

int per_bit_data[MAX_PUP_NUM][DQ_NUM];
#endif

static u32 sdram_data[LEN_KILLER_PATTERN] __aligned(32) = { 0 };

static struct crc_dma_desc dma_desc __aligned(32) = { 0 };

#define XOR_TIMEOUT 0x8000000

struct xor_channel_t {
	struct crc_dma_desc *desc;
	unsigned long desc_phys_addr;
};

#define XOR_CAUSE_DONE_MASK(chan) ((0x1 | 0x2) << (chan * 16))

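/*
 * Poll the XOR engine until the given channel reports completion and
 * becomes idle (bounded by XOR_TIMEOUT iterations), then clear the
 * channel's "done" cause bits.
 */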
void xor_waiton_eng(int chan)
{
	int timeout;

	timeout = 0;
	while (!(reg_read(XOR_CAUSE_REG(XOR_UNIT(chan))) &
		 XOR_CAUSE_DONE_MASK(XOR_CHAN(chan)))) {
		if (timeout > XOR_TIMEOUT)
			goto timeout;

		timeout++;
	}

	timeout = 0;
	while (mv_xor_state_get(chan) != MV_IDLE) {
		if (timeout > XOR_TIMEOUT)
			goto timeout;

		timeout++;
	}

	/* Clear the "done" interrupt cause bits of this channel */
	reg_write(XOR_CAUSE_REG(XOR_UNIT(chan)),
		  ~(XOR_CAUSE_DONE_MASK(XOR_CHAN(chan))));

timeout:
	return;
}

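/*
 * Pattern word indices that are skipped when the caller requests a
 * "special" compare (see the special_compare argument of the compare
 * routines below).
 */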
static int special_compare_pattern(u32 uj)
{
	if ((uj == 30) || (uj == 31) || (uj == 61) || (uj == 62) ||
	    (uj == 93) || (uj == 94) || (uj == 126) || (uj == 127))
		return 1;

	return 0;
}
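
/*
 * Compare one 32-bit word of the read-back data against the expected
 * pattern and, for every miscomparing byte, set the corresponding byte
 * lane (pup) bit in *pup. With MV_DEBUG_DQS enabled the failing bits
 * are also recorded per DQ in per_bit_data[].
 */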
static void compare_pattern_v1(u32 uj, u32 *pup, u32 *pattern,
			       u32 pup_groups, int debug_dqs)
{
	u32 val;
	u32 uk;
	u32 var1;
	u32 var2;
	__maybe_unused u32 dq;

	if (((sdram_data[uj]) != (pattern[uj])) && (*pup != 0xFF)) {
		for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
			val = CMP_BYTE_SHIFT * uk;
			var1 = ((sdram_data[uj] >> val) & CMP_BYTE_MASK);
			var2 = ((pattern[uj] >> val) & CMP_BYTE_MASK);

			if (var1 != var2) {
				*pup |= (1 << (uk + (PUP_NUM_32BIT *
					(uj % pup_groups))));

#ifdef MV_DEBUG_DQS
				if (!debug_dqs)
					continue;

				for (dq = 0; dq < DQ_NUM; dq++) {
					val = uk + (PUP_NUM_32BIT *
						    (uj % pup_groups));
					if (((var1 >> dq) & 0x1) !=
					    ((var2 >> dq) & 0x1))
						per_bit_data[val][dq] = 1;
					else
						per_bit_data[val][dq] = 0;
				}
#endif
			}
		}
	}
}

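/*
 * 16-bit bus variant of the word compare: failing bytes are folded onto
 * the two pups of the 16-bit interface and flagged in *pup.
 */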
static void compare_pattern_v2(u32 uj, u32 *pup, u32 *pattern)
{
	u32 val;
	u32 uk;
	u32 var1;
	u32 var2;

	if (((sdram_data[uj]) != (pattern[uj])) && (*pup != 0x3)) {
		/* Found an error - locate the failing byte lane(s) */
		for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
			val = CMP_BYTE_SHIFT * uk;
			var1 = (sdram_data[uj] >> val) & CMP_BYTE_MASK;
			var2 = (pattern[uj] >> val) & CMP_BYTE_MASK;
			if (var1 != var2)
				*pup |= (1 << (uk % PUP_NUM_16BIT));
		}
	}
}

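/*
 * ddr3_sdram_compare - optionally write a pattern to DRAM through the
 * XOR engine, read the area back into the local SRAM buffer and compare
 * it word by word. Every byte lane (pup) that miscompares gets its bit
 * set in *new_locked_pup. Returns MV_OK.
 */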
int ddr3_sdram_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
		       u32 *new_locked_pup, u32 *pattern,
		       u32 pattern_len, u32 sdram_offset, int write,
		       int mask, u32 *mask_pattern,
		       int special_compare)
{
	u32 uj;
	__maybe_unused u32 pup_groups;
	__maybe_unused u32 dq;

#if !defined(MV88F67XX)
	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;
#endif

	ddr3_reset_phy_read_fifo();

	/* Transfer the test pattern to the tested DRAM area if requested */
	if (write == 1)
		ddr3_dram_sram_burst((u32)pattern, sdram_offset, pattern_len);

	/* Read the tested area back into the local buffer */
	ddr3_dram_sram_burst(sdram_offset, (u32)sdram_data, pattern_len);

	/* Compare word by word and mark failing pups */
	for (uj = 0; uj < pattern_len; uj++) {
		if (special_compare && special_compare_pattern(uj))
			continue;

#if defined(MV88F78X60) || defined(MV88F672X)
		compare_pattern_v1(uj, new_locked_pup, pattern, pup_groups, 1);
#elif defined(MV88F67XX)
		compare_pattern_v2(uj, new_locked_pup, pattern);
#endif
	}

	return MV_OK;
}

#if defined(MV88F78X60) || defined(MV88F672X)
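
/*
 * ddr3_sdram_dm_compare - compare routine used by DM (data mask)
 * training: burst the PBS pattern to SDRAM_PBS_TX_OFFS, read it back
 * and collect the failing pups, then write and read back two extra
 * words behind the pattern and fold any failing byte lanes into
 * *new_locked_pup.
 */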
int ddr3_sdram_dm_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			  u32 *new_locked_pup, u32 *pattern,
			  u32 sdram_offset)
{
	u32 uj, uk, var1, var2, pup_groups;
	u32 val;
	u32 pup = 0;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_dram_sram_burst((u32)pattern, SDRAM_PBS_TX_OFFS,
			     LEN_PBS_PATTERN);
	ddr3_dram_sram_burst(SDRAM_PBS_TX_OFFS, (u32)sdram_data,
			     LEN_PBS_PATTERN);

	/* Compare the burst-read data and collect failing pups */
	for (uj = 0; uj < LEN_PBS_PATTERN; uj++)
		compare_pattern_v1(uj, &pup, pattern, pup_groups, 0);

	/* Write two extra words behind the pattern and read them back */
	*(u32 *)(SDRAM_PBS_TX_OFFS + 0x10) = 0x12345678;
	*(u32 *)(SDRAM_PBS_TX_OFFS + 0x14) = 0x12345678;

	sdram_data[0] = *(u32 *)(SDRAM_PBS_TX_OFFS + 0x10);
	sdram_data[1] = *(u32 *)(SDRAM_PBS_TX_OFFS + 0x14);

	for (uj = 0; uj < 2; uj++) {
		if (((sdram_data[uj]) != (pattern[uj])) &&
		    (*new_locked_pup != 0xFF)) {
			for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
				val = CMP_BYTE_SHIFT * uk;
				var1 = ((sdram_data[uj] >> val) &
					CMP_BYTE_MASK);
				var2 = ((pattern[uj] >> val) &
					CMP_BYTE_MASK);
				if (var1 != var2) {
					*new_locked_pup |= (1 << (uk +
						(PUP_NUM_32BIT *
						 (uj % pup_groups))));
					*new_locked_pup |= pup;
				}
			}
		}
	}

	return MV_OK;
}
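
/*
 * ddr3_sdram_pbs_compare - compare the PBS pattern for the current PBS
 * delay value (pbs_curr_val). For every DQ that fails the compare and
 * is not already locked at a different delay, record pbs_curr_val in
 * skew_array, clear its pup bit in unlock_pup_dq_array[] and lock the
 * DQ by writing pbs_lock_val to its PBS delay register.
 */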
int ddr3_sdram_pbs_compare(MV_DRAM_INFO *dram_info, u32 pup_locked,
			   int is_tx, u32 pbs_pattern_idx,
			   u32 pbs_curr_val, u32 pbs_lock_val,
			   u32 *skew_array, u8 *unlock_pup_dq_array,
			   u32 ecc)
{
	u32 pbs_write_pup[DQ_NUM] = { 0 };
	u32 update_pup;
	u32 max_pup;
	u32 pup_addr;
	u32 ui, dq, pup;
	int var1, var2;
	u32 sdram_offset, pup_groups, tmp_pup;
	u32 *pattern_ptr;
	u32 val;

	/* Select the PBS pattern that matches the DDR bus width */
	switch (dram_info->ddr_width) {
#if defined(MV88F672X)
	case 16:
		pattern_ptr = (u32 *)&pbs_pattern[pbs_pattern_idx];
		break;
#endif
	case 32:
		pattern_ptr = (u32 *)&pbs_pattern_32b[pbs_pattern_idx];
		break;
#if defined(MV88F78X60)
	case 64:
		pattern_ptr = (u32 *)&pbs_pattern_64b[pbs_pattern_idx];
		break;
#endif
	default:
		return MV_FAIL;
	}

	max_pup = dram_info->num_of_std_pups;

	sdram_offset = SDRAM_PBS_I_OFFS + pbs_pattern_idx * SDRAM_PBS_NEXT_OFFS;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_reset_phy_read_fifo();

	/* Write the pattern to DRAM when testing the TX direction */
	if (is_tx == 1) {
		ddr3_dram_sram_burst((u32)pattern_ptr, sdram_offset,
				     LEN_PBS_PATTERN);
	}

	ddr3_dram_sram_read(sdram_offset, (u32)sdram_data, LEN_PBS_PATTERN);

	/* Compare the read data word by word */
	for (ui = 0; ui < LEN_PBS_PATTERN; ui++) {
		if ((sdram_data[ui]) != (pattern_ptr[ui])) {
			/* Miscompare - locate the failing byte lane(s) */
			for (pup = 0; pup < PUP_NUM_32BIT; pup++) {
				val = CMP_BYTE_SHIFT * pup;
				var1 = ((sdram_data[ui] >> val) &
					CMP_BYTE_MASK);
				var2 = ((pattern_ptr[ui] >> val) &
					CMP_BYTE_MASK);

				if (var1 != var2) {
					if (dram_info->ddr_width > 16) {
						tmp_pup = (pup + PUP_NUM_32BIT *
							   (ui % pup_groups));
					} else {
						tmp_pup = (pup % PUP_NUM_16BIT);
					}

					update_pup = (1 << tmp_pup);
					/* In ECC mode only pup 0 is handled */
					if (ecc && (update_pup != 0x1))
						continue;

					/*
					 * The pup failed - find the failing
					 * DQ(s) and record the current PBS
					 * delay for them
					 */
					for (dq = 0; dq < DQ_NUM; dq++) {
						val = tmp_pup * (1 - ecc) +
							ecc * ECC_PUP;
						if (((var1 >> dq) & 0x1) !=
						    ((var2 >> dq) & 0x1)) {
							if (pbs_locked_dq[val][dq] == 1 &&
							    pbs_locked_value[val][dq] != pbs_curr_val)
								continue;

							/* Mark the pup for locking below */
							pbs_write_pup[dq] |=
								update_pup;

							/* This DQ is no longer unlocked */
							unlock_pup_dq_array[dq] &=
								~update_pup;

							/* Remember the delay at which it failed */
							skew_array[tmp_pup * DQ_NUM + dq] =
								pbs_curr_val;
						}
					}
				}
			}
		}
	}

	pup_addr = (is_tx == 1) ? PUP_PBS_TX : PUP_PBS_RX;

	/* Lock the failing DQs at the requested PBS value */
	for (dq = 0; dq < DQ_NUM; dq++) {
		for (pup = 0; pup < max_pup; pup++) {
			if (pbs_write_pup[dq] & (1 << pup)) {
				val = pup * (1 - ecc) + ecc * ECC_PUP;
				if (pbs_locked_dq[val][dq] == 1 &&
				    pbs_locked_value[val][dq] != pbs_curr_val)
					continue;

				pbs_locked_dq[val][dq] = 1;
				pbs_locked_value[val][dq] = pbs_curr_val;
				ddr3_write_pup_reg(pup_addr +
						   pbs_dq_mapping[val][dq],
						   CS0, val, 0, pbs_lock_val);
			}
		}
	}

	return MV_OK;
}
#endif
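
/*
 * ddr3_sdram_direct_compare - same compare as ddr3_sdram_compare, but
 * the pattern is written and read back by the CPU directly instead of
 * going through the XOR engine.
 */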
int ddr3_sdram_direct_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			      u32 *new_locked_pup, u32 *pattern,
			      u32 pattern_len, u32 sdram_offset,
			      int write, int mask, u32 *mask_pattern)
{
	u32 uj, uk, pup_groups;
	u32 *sdram_addr;

	sdram_addr = (u32 *)sdram_offset;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	/* Write the pattern directly to the tested area if requested */
	if (write == 1) {
		for (uk = 0; uk < pattern_len; uk++) {
			*sdram_addr = pattern[uk];
			sdram_addr++;
		}
	}

	sdram_addr = (u32 *)sdram_offset;

	for (uk = 0; uk < pattern_len; uk++) {
		sdram_data[uk] = *sdram_addr;
		sdram_addr++;
	}

	/* Compare word by word and mark failing pups */
	for (uj = 0; uj < pattern_len; uj++) {
		if (dram_info->ddr_width > 16) {
			compare_pattern_v1(uj, new_locked_pup, pattern,
					   pup_groups, 0);
		} else {
			compare_pattern_v2(uj, new_locked_pup, pattern);
		}
	}

	return MV_OK;
}
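
/*
 * ddr3_dram_sram_burst - copy 'len' 32-bit words between DRAM and the
 * on-chip SRAM using XOR engine channel 0 in DMA mode. The XOR address
 * override window is set up so the DRAM side of the transfer goes
 * through the correct chip select; when the destination is above
 * SRAM_BASE the copied area is cache-invalidated after the transfer.
 */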
int ddr3_dram_sram_burst(u32 src, u32 dst, u32 len)
{
	u32 chan, byte_count, cs_num, byte;
	struct xor_channel_t channel;

	chan = 0;
	byte_count = len * 4;

	/* Wait for the XOR engine to become idle */
	while (mv_xor_state_get(chan) != MV_IDLE)
		;

	/* Build the DMA descriptor */
	channel.desc = &dma_desc;

	if (src < SRAM_BASE) {
		/* The source is in DRAM - override its chip select */
		cs_num = (src / (1 + SDRAM_CS_SIZE));
		reg_write(XOR_ADDR_OVRD_REG(0, 0),
			  ((cs_num << 1) | (1 << 0)));
		channel.desc->src_addr0 = (src % (1 + SDRAM_CS_SIZE));
		channel.desc->dst_addr = dst;
	} else {
		/* The destination is in DRAM - override its chip select */
		cs_num = (dst / (1 + SDRAM_CS_SIZE));
		reg_write(XOR_ADDR_OVRD_REG(0, 0),
			  ((cs_num << 25) | (1 << 24)));
		channel.desc->src_addr0 = src;
		channel.desc->dst_addr = (dst % (1 + SDRAM_CS_SIZE));
	}

	channel.desc->src_addr1 = 0;
	channel.desc->byte_cnt = byte_count;
	channel.desc->next_desc_ptr = 0;
	channel.desc->status = 1 << 31;
	channel.desc->desc_cmd = 0x0;
	channel.desc_phys_addr = (unsigned long)&dma_desc;

	ddr3_flush_l1_line((u32)&dma_desc);

	/* Start the DMA transfer */
	if (mv_xor_transfer(chan, MV_DMA, channel.desc_phys_addr) != MV_OK)
		return MV_FAIL;

	/* Wait for the transfer to complete */
	xor_waiton_eng(chan);

	/* Make sure the CPU sees the freshly written data */
	if (dst > SRAM_BASE) {
		for (byte = 0; byte < byte_count; byte += 0x20)
			cache_inv(dst + byte);
	}

	return MV_OK;
}
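
/*
 * ddr3_flush_l1_line - flush the L1 cache line at 'line' and the
 * following line so the XOR engine sees the up-to-date descriptor. The
 * ARMv6/ARMv7 flush primitive is selected from the sample-at-reset CPU
 * architecture bit (MV88F672X is always ARMv7).
 */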
static void ddr3_flush_l1_line(u32 line)
{
	u32 reg;

#if defined(MV88F672X)
	reg = 1;
#else
	reg = reg_read(REG_SAMPLE_RESET_LOW_ADDR) &
		(1 << REG_SAMPLE_RESET_CPU_ARCH_OFFS);
#ifdef MV88F67XX
	reg = ~reg & (1 << REG_SAMPLE_RESET_CPU_ARCH_OFFS);
#endif
#endif

	if (reg) {
		/* ARMv7 architecture mode */
		flush_l1_v7(line);
		flush_l1_v7(line + CACHE_LINE_SIZE);
	} else {
		/* ARMv6 architecture mode */
		flush_l1_v6(line);
		flush_l1_v6(line + CACHE_LINE_SIZE);
	}
}

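/*
 * ddr3_dram_sram_read - plain CPU copy of 'len' 32-bit words from 'src'
 * to 'dst' (no XOR engine involved).
 */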
int ddr3_dram_sram_read(u32 src, u32 dst, u32 len)
{
	u32 ui;
	u32 *dst_ptr, *src_ptr;

	dst_ptr = (u32 *)dst;
	src_ptr = (u32 *)src;

	for (ui = 0; ui < len; ui++) {
		*dst_ptr = *src_ptr;
		dst_ptr++;
		src_ptr++;
	}

	return MV_OK;
}

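/*
 * ddr3_sdram_dqs_compare - DQS training variant of the compare:
 * optionally write the pattern, read the area back through the XOR
 * engine and flag miscomparing pups in *new_locked_pup, skipping the
 * "special" word indices when requested.
 */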
int ddr3_sdram_dqs_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			   u32 *new_locked_pup, u32 *pattern,
			   u32 pattern_len, u32 sdram_offset, int write,
			   int mask, u32 *mask_pattern,
			   int special_compare)
{
	u32 uj, pup_groups;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_reset_phy_read_fifo();

	/* Transfer the test pattern to the tested DRAM area if requested */
	if (write == 1)
		ddr3_dram_sram_burst((u32)pattern, sdram_offset, pattern_len);

	/* Read the tested area back into the local buffer */
	ddr3_dram_sram_burst(sdram_offset, (u32)sdram_data, pattern_len);

	/* Compare word by word and mark failing pups */
	for (uj = 0; uj < pattern_len; uj++) {
		if (special_compare && special_compare_pattern(uj))
			continue;

		if (dram_info->ddr_width > 16) {
			compare_pattern_v1(uj, new_locked_pup, pattern,
					   pup_groups, 1);
		} else {
			compare_pattern_v2(uj, new_locked_pup, pattern);
		}
	}

	return MV_OK;
}

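/*
 * ddr3_reset_phy_read_fifo - reset the DDR PHY read FIFO: enable the
 * read leveling bit, pulse the FIFO reset (with software override) and
 * wait for the reset bit to self-clear, then clear the read leveling
 * bit again.
 */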
void ddr3_reset_phy_read_fifo(void)
{
	u32 reg;

	/* Set the read leveling bit in the training register */
	reg = reg_read(REG_DRAM_TRAINING_ADDR);
	reg |= (1 << REG_DRAM_TRAINING_RL_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);

	/* Assert the read FIFO reset with software override */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg |= ((1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS) +
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS));
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	/* Wait for the FIFO reset bit to self-clear */
	do {
		reg = reg_read(REG_DRAM_TRAINING_2_ADDR) &
			(1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS);
	} while (reg);

	/* Clear the read leveling bit again */
	reg = reg_read(REG_DRAM_TRAINING_ADDR);
	reg &= ~(1 << REG_DRAM_TRAINING_RL_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);
}