#include <common.h>
#include <pci.h>
#include <asm/arch/device.h>
#include <asm/arch/mrc.h>
#include <asm/arch/msg_port.h>
#include "mrc_util.h"
#include "hte.h"
#include "smc.h"

/* tRFC values in picoseconds for each supported DRAM density */
static const uint32_t t_rfc[5] = {
	90000,
	110000,
	160000,
	300000,
	350000,
};

/* tCK clock period in picoseconds per DDR speed index (800, 1066, 1333 MT/s) */
static const uint32_t t_ck[3] = {
	2500,
	1875,
	1500
};

/* Pre-characterized delay settings, indexed by PLATFORM_ID */
static const uint16_t ddr_wclk[] = {193, 158};
static const uint16_t ddr_wctl[] = {1, 217};
static const uint16_t ddr_wcmd[] = {1, 220};

#ifdef BACKUP_RCVN
static const uint16_t ddr_rcvn[] = {129, 498};
#endif

#ifdef BACKUP_WDQS
static const uint16_t ddr_wdqs[] = {65, 289};
#endif

#ifdef BACKUP_RDQS
static const uint8_t ddr_rdqs[] = {32, 24};
#endif

#ifdef BACKUP_WDQ
static const uint16_t ddr_wdq[] = {32, 257};
#endif

void clear_self_refresh(struct mrc_params *mrc_params)
{
	ENTERFN();

	/* Clear the PMSTS channel self-refresh bits */
	mrc_write_mask(MEM_CTLR, PMSTS, PMSTS_DISR, PMSTS_DISR);

	LEAVEFN();
}

/* Configure the MCU DRAM timing registers DTR0..DTR4 */
void prog_ddr_timing_control(struct mrc_params *mrc_params)
{
	uint8_t tcl, wl;
	uint8_t trp, trcd, tras, twr, twtr, trrd, trtp, tfaw;
	uint32_t tck;
	u32 dtr0, dtr1, dtr2, dtr3, dtr4;
	u32 tmp1, tmp2;

	ENTERFN();

	mrc_post_code(0x02, 0x00);

	dtr0 = msg_port_read(MEM_CTLR, DTR0);
	dtr1 = msg_port_read(MEM_CTLR, DTR1);
	dtr2 = msg_port_read(MEM_CTLR, DTR2);
	dtr3 = msg_port_read(MEM_CTLR, DTR3);
	dtr4 = msg_port_read(MEM_CTLR, DTR4);

	tck = t_ck[mrc_params->ddr_speed];	/* Clock period in ps */
	tcl = mrc_params->params.cl;		/* CAS latency in clocks */
	trp = tcl;				/* tRP tracks the CAS latency */
	trcd = tcl;				/* tRCD tracks the CAS latency */
	tras = MCEIL(mrc_params->params.ras, tck);

	/* JEDEC tWR is 15ns for DDR3-800 through DDR3-1600 */
	twr = MCEIL(15000, tck);

	twtr = MCEIL(mrc_params->params.wtr, tck);
	trrd = MCEIL(mrc_params->params.rrd, tck);
	trtp = 4;	/* 4 clocks is valid for 800 and 1066 */
	tfaw = MCEIL(mrc_params->params.faw, tck);

	wl = 5 + mrc_params->ddr_speed;		/* CAS write latency */

	dtr0 &= ~DTR0_DFREQ_MASK;
	dtr0 |= mrc_params->ddr_speed;
	dtr0 &= ~DTR0_TCL_MASK;
	tmp1 = tcl - 5;
	dtr0 |= ((tcl - 5) << 12);
	dtr0 &= ~DTR0_TRP_MASK;
	dtr0 |= ((trp - 5) << 4);
	dtr0 &= ~DTR0_TRCD_MASK;
	dtr0 |= ((trcd - 5) << 8);

	dtr1 &= ~DTR1_TWCL_MASK;
	tmp2 = wl - 3;
	dtr1 |= (wl - 3);
	dtr1 &= ~DTR1_TWTP_MASK;
	dtr1 |= ((wl + 4 + twr - 14) << 8);
	dtr1 &= ~DTR1_TRTP_MASK;
	dtr1 |= ((MMAX(trtp, 4) - 3) << 28);
	dtr1 &= ~DTR1_TRRD_MASK;
	dtr1 |= ((trrd - 4) << 24);
	dtr1 &= ~DTR1_TCMD_MASK;
	dtr1 |= (1 << 4);
	dtr1 &= ~DTR1_TRAS_MASK;
	dtr1 |= ((tras - 14) << 20);
	dtr1 &= ~DTR1_TFAW_MASK;
	dtr1 |= ((((tfaw + 1) >> 1) - 5) << 16);

	/* CAS-to-CAS delay: keep the default (cleared) encoding */
	dtr1 &= ~DTR1_TCCD_MASK;

	dtr2 &= ~DTR2_TRRDR_MASK;
	dtr2 |= 1;
	dtr2 &= ~DTR2_TWWDR_MASK;
	dtr2 |= (2 << 8);
	dtr2 &= ~DTR2_TRWDR_MASK;
	dtr2 |= (2 << 16);

	dtr3 &= ~DTR3_TWRDR_MASK;
	dtr3 |= 2;
	dtr3 &= ~DTR3_TXXXX_MASK;
	dtr3 |= (2 << 4);

	dtr3 &= ~DTR3_TRWSR_MASK;
	if (mrc_params->ddr_speed == DDRFREQ_800) {
		dtr3 |= ((tcl - 5 + 1) << 8);
	} else if (mrc_params->ddr_speed == DDRFREQ_1066) {
		dtr3 |= ((tcl - 5 + 1) << 8);
	}

	dtr3 &= ~DTR3_TWRSR_MASK;
	dtr3 |= ((4 + wl + twtr - 11) << 13);

	dtr3 &= ~DTR3_TXP_MASK;
	if (mrc_params->ddr_speed == DDRFREQ_800)
		dtr3 |= ((MMAX(0, 1 - 1)) << 22);
	else
		dtr3 |= ((MMAX(0, 2 - 1)) << 22);

	dtr4 &= ~DTR4_WRODTSTRT_MASK;
	dtr4 |= 1;
	dtr4 &= ~DTR4_WRODTSTOP_MASK;
	dtr4 |= (1 << 4);
	dtr4 &= ~DTR4_XXXX1_MASK;
	dtr4 |= ((1 + tmp1 - tmp2 + 2) << 8);
	dtr4 &= ~DTR4_XXXX2_MASK;
	dtr4 |= ((1 + tmp1 - tmp2 + 2) << 12);
	dtr4 &= ~(DTR4_ODTDIS | DTR4_TRGSTRDIS);

	msg_port_write(MEM_CTLR, DTR0, dtr0);
	msg_port_write(MEM_CTLR, DTR1, dtr1);
	msg_port_write(MEM_CTLR, DTR2, dtr2);
	msg_port_write(MEM_CTLR, DTR3, dtr3);
	msg_port_write(MEM_CTLR, DTR4, dtr4);

	LEAVEFN();
}

/* Configure the MCU before initiating the JEDEC init sequence */
void prog_decode_before_jedec(struct mrc_params *mrc_params)
{
	u32 drp;
	u32 drfc;
	u32 dcal;
	u32 dsch;
	u32 dpmc0;

	ENTERFN();

	/* Disable power saving features */
	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
	dpmc0 |= (DPMC0_CLKGTDIS | DPMC0_DISPWRDN);
	dpmc0 &= ~DPMC0_PCLSTO_MASK;
	dpmc0 &= ~DPMC0_DYNSREN;
	msg_port_write(MEM_CTLR, DPMC0, dpmc0);

	/* Disable out-of-order transactions */
	dsch = msg_port_read(MEM_CTLR, DSCH);
	dsch |= (DSCH_OOODIS | DSCH_NEWBYPDIS);
	msg_port_write(MEM_CTLR, DSCH, dsch);

	/* Disable issuing of refresh commands (tREFI = 0) */
	drfc = msg_port_read(MEM_CTLR, DRFC);
	drfc &= ~DRFC_TREFI_MASK;
	msg_port_write(MEM_CTLR, DRFC, drfc);

	/* Disable periodic and self-refresh-exit ZQ calibration */
	dcal = msg_port_read(MEM_CTLR, DCAL);
	dcal &= ~DCAL_ZQCINT_MASK;
	dcal &= ~DCAL_SRXZQCL_MASK;
	msg_port_write(MEM_CTLR, DCAL, dcal);

	/* Enable only the populated ranks */
	drp = 0;
	if (mrc_params->rank_enables & 1)
		drp |= DRP_RKEN0;
	if (mrc_params->rank_enables & 2)
		drp |= DRP_RKEN1;
	msg_port_write(MEM_CTLR, DRP, drp);

	LEAVEFN();
}

/*
 * Wake the DRAM controller after cold reset: set the COLDWAKE bit, send
 * the wake command, then program the default ODT mode in DRMC.
 */
void perform_ddr_reset(struct mrc_params *mrc_params)
{
	ENTERFN();

	/* Set COLDWAKE bit before sending the WAKE message */
	mrc_write_mask(MEM_CTLR, DRMC, DRMC_COLDWAKE, DRMC_COLDWAKE);

	/* Send the wake command to the DUNIT (must be done before JEDEC init) */
	dram_wake_command();

	/* Select ODT mode according to the configured read ODT value */
	msg_port_write(MEM_CTLR, DRMC,
		       mrc_params->rd_odt_value == 0 ? DRMC_ODTMODE : 0);

	LEAVEFN();
}

/*
 * Initialise the DDR I/O (DDRPHY): per-byte-lane receive/transmit
 * buffers, ODT, latencies, vref, RCOMP and the master DLLs for every
 * enabled channel.
 */
void ddrphy_init(struct mrc_params *mrc_params)
{
	uint32_t temp;
	uint8_t ch;		/* channel counter */
	uint8_t rk;		/* rank counter */
	uint8_t bl_grp;		/* byte lane group counter (2 lanes per group) */
	uint8_t bl_divisor = 1;	/* byte lane divisor */
	/* For DDR3: 0 == 800, 1 == 1066, 2 == 1333 */
	uint8_t speed = mrc_params->ddr_speed & 3;
	uint8_t cas;
	uint8_t cwl;

	ENTERFN();

	cas = mrc_params->params.cl;
	cwl = 5 + mrc_params->ddr_speed;

	/* ddrphy_init starts */
	mrc_post_code(0x03, 0x00);

	/*
	 * Clear the CMD fub enable/pointer bits on every enabled channel and
	 * assert the PHY master reset; the bits are set again at the end of
	 * this function.
	 */
285 for (ch = 0; ch < NUM_CHANNELS; ch++) {
286 if (mrc_params->channel_enables & (1 << ch)) {
287
288 mrc_alt_write_mask(DDRPHY,
289 CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET,
290 ~(1 << 20), 1 << 20);
291
292 mrc_alt_write_mask(DDRPHY,
293 CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
294 ~(1 << 2), 1 << 2);
295
296 mrc_alt_write_mask(DDRPHY,
297 CMDPTRREG + ch * DDRIOCCC_CH_OFFSET,
298 ~(1 << 0), 1 << 0);
299 }
300 }
301
302
303 mrc_alt_write_mask(DDRPHY, MASTERRSTN, 0, 1);
304
305
306
307
308 mrc_post_code(0x03, 0x10);
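	/*
	 * Per-channel, per-byte-lane-group DQ fub setup: receive ODT,
	 * read/write latencies, ODT overrides, vref and RX buffer control,
	 * followed by the CMD and clock-control fubs, vref targets, RCOMP
	 * settings and the initial write clock/control/command delays.
	 */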
309 for (ch = 0; ch < NUM_CHANNELS; ch++) {
310 if (mrc_params->channel_enables & (1 << ch)) {
311
312 for (bl_grp = 0;
313 bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
314 bl_grp++) {
315
316 mrc_alt_write_mask(DDRPHY,
317 DQOBSCKEBBCTL +
318 bl_grp * DDRIODQ_BL_OFFSET +
319 ch * DDRIODQ_CH_OFFSET,
320 bl_grp ? 0 : (1 << 22), 1 << 22);
321
322
323 switch (mrc_params->rd_odt_value) {
324 case 1:
325 temp = 0x3;
326 break;
327 case 2:
328 temp = 0x3;
329 break;
330 case 3:
331 temp = 0x3;
332 break;
333 default:
334 temp = 0x3;
335 break;
336 }
337
338
339 mrc_alt_write_mask(DDRPHY,
340 B0RXIOBUFCTL +
341 bl_grp * DDRIODQ_BL_OFFSET +
342 ch * DDRIODQ_CH_OFFSET,
343 temp << 5, 0x60);
344
345 mrc_alt_write_mask(DDRPHY,
346 B1RXIOBUFCTL +
347 bl_grp * DDRIODQ_BL_OFFSET +
348 ch * DDRIODQ_CH_OFFSET,
349 temp << 5, 0x60);
350
351
352 temp = (cas << 24) | (cas << 16) |
353 (cas << 8) | (cas << 0);
354 switch (speed) {
355 case 0:
356 temp -= 0x01010101;
357 break;
358 case 1:
359 temp -= 0x02020202;
360 break;
361 case 2:
362 temp -= 0x03030303;
363 break;
364 case 3:
365 temp -= 0x04040404;
366 break;
367 }
368
369
370 mrc_alt_write_mask(DDRPHY,
371 B01LATCTL1 +
372 bl_grp * DDRIODQ_BL_OFFSET +
373 ch * DDRIODQ_CH_OFFSET,
374 temp, 0x1f1f1f1f);
375 switch (speed) {
376
377 case 0:
378 temp = (0x06 << 16) | (0x07 << 8);
379 break;
380 case 1:
381 temp = (0x07 << 16) | (0x08 << 8);
382 break;
383 case 2:
384 temp = (0x09 << 16) | (0x0a << 8);
385 break;
386 case 3:
387 temp = (0x0a << 16) | (0x0b << 8);
388 break;
389 }
390
391
392 mrc_alt_write_mask(DDRPHY,
393 B0ONDURCTL +
394 bl_grp * DDRIODQ_BL_OFFSET +
395 ch * DDRIODQ_CH_OFFSET,
396 temp, 0x003f3f00);
397
398 mrc_alt_write_mask(DDRPHY,
399 B1ONDURCTL +
400 bl_grp * DDRIODQ_BL_OFFSET +
401 ch * DDRIODQ_CH_OFFSET,
402 temp, 0x003f3f00);
403
404 switch (mrc_params->rd_odt_value) {
405 case 0:
406
407 temp = (0x3f << 16) | (0x3f << 10);
408 break;
409 default:
410
411 temp = (0x3f << 16) | (0x2a << 10);
412 break;
413 }
414
415
416 mrc_alt_write_mask(DDRPHY,
417 B0OVRCTL +
418 bl_grp * DDRIODQ_BL_OFFSET +
419 ch * DDRIODQ_CH_OFFSET,
420 temp, 0x003ffc00);
421
422 mrc_alt_write_mask(DDRPHY,
423 B1OVRCTL +
424 bl_grp * DDRIODQ_BL_OFFSET +
425 ch * DDRIODQ_CH_OFFSET,
426 temp, 0x003ffc00);
427
428
429
430
431 mrc_alt_write_mask(DDRPHY,
432 B0LATCTL0 +
433 bl_grp * DDRIODQ_BL_OFFSET +
434 ch * DDRIODQ_CH_OFFSET,
435 ((cas + 7) << 16) | ((cas - 4) << 8) |
436 ((cwl - 2) << 0), 0x003f1f1f);
437 mrc_alt_write_mask(DDRPHY,
438 B1LATCTL0 +
439 bl_grp * DDRIODQ_BL_OFFSET +
440 ch * DDRIODQ_CH_OFFSET,
441 ((cas + 7) << 16) | ((cas - 4) << 8) |
442 ((cwl - 2) << 0), 0x003f1f1f);
443
444
445 mrc_alt_write_mask(DDRPHY,
446 B0RXIOBUFCTL +
447 bl_grp * DDRIODQ_BL_OFFSET +
448 ch * DDRIODQ_CH_OFFSET,
449 0, 0x81);
450 mrc_alt_write_mask(DDRPHY,
451 B1RXIOBUFCTL +
452 bl_grp * DDRIODQ_BL_OFFSET +
453 ch * DDRIODQ_CH_OFFSET,
454 0, 0x81);
455
456
457 mrc_alt_write_mask(DDRPHY,
458 DQCTL +
459 bl_grp * DDRIODQ_BL_OFFSET +
460 ch * DDRIODQ_CH_OFFSET,
461 1 << 16, 1 << 16);
462 mrc_alt_write_mask(DDRPHY,
463 B01PTRCTL1 +
464 bl_grp * DDRIODQ_BL_OFFSET +
465 ch * DDRIODQ_CH_OFFSET,
466 1 << 8, 1 << 8);
467
468
469
470 mrc_alt_write_mask(DDRPHY,
471 B0VREFCTL +
472 bl_grp * DDRIODQ_BL_OFFSET +
473 ch * DDRIODQ_CH_OFFSET,
474 (0x03 << 2) | (0x0 << 1) | (0x0 << 0),
475 0xff);
476
477 mrc_alt_write_mask(DDRPHY,
478 B1VREFCTL +
479 bl_grp * DDRIODQ_BL_OFFSET +
480 ch * DDRIODQ_CH_OFFSET,
481 (0x03 << 2) | (0x0 << 1) | (0x0 << 0),
482 0xff);
483
484 mrc_alt_write_mask(DDRPHY,
485 B0RXIOBUFCTL +
486 bl_grp * DDRIODQ_BL_OFFSET +
487 ch * DDRIODQ_CH_OFFSET,
488 0, 0x10);
489
490 mrc_alt_write_mask(DDRPHY,
491 B1RXIOBUFCTL +
492 bl_grp * DDRIODQ_BL_OFFSET +
493 ch * DDRIODQ_CH_OFFSET,
494 0, 0x10);
495 }
496
497
498 mrc_alt_write_mask(DDRPHY,
499 CMDOBSCKEBBCTL + ch * DDRIOCCC_CH_OFFSET,
500 0, 1 << 23);
501
502
503 mrc_alt_write_mask(DDRPHY,
504 CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
505 0, 0x03);
506
507
508 mrc_alt_write_mask(DDRPHY,
509 CMDRCOMPODT + ch * DDRIOCCC_CH_OFFSET,
510 (0x03 << 5) | (0x03 << 0), 0x3ff);
511
512
513
514
515 mrc_alt_write_mask(DDRPHY,
516 CMDPMDLYREG4 + ch * DDRIOCCC_CH_OFFSET,
517 0xffffffff, 0xffffffff);
518
519
520
521
522
523 mrc_alt_write_mask(DDRPHY,
524 CMDPMDLYREG3 + ch * DDRIOCCC_CH_OFFSET,
525 0xfffff616, 0xffffffff);
526
527 mrc_alt_write_mask(DDRPHY,
528 CMDPMDLYREG2 + ch * DDRIOCCC_CH_OFFSET,
529 0xffffffff, 0xffffffff);
530
531 mrc_alt_write_mask(DDRPHY,
532 CMDPMDLYREG1 + ch * DDRIOCCC_CH_OFFSET,
533 0xffffffff, 0xffffffff);
534
535 mrc_alt_write_mask(DDRPHY,
536 CMDPMDLYREG0 + ch * DDRIOCCC_CH_OFFSET,
537 0xffffffff, 0xffffffff);
538
539 mrc_alt_write_mask(DDRPHY,
540 CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET,
541 (0x6 << 8) | (0x1 << 6) | (0x4 << 0),
542 0xffe00f4f);
543
544 mrc_alt_write_mask(DDRPHY,
545 CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
546 (0x3 << 4) | (0x7 << 0), 0x7f);
547
548
549 mrc_alt_write_mask(DDRPHY,
550 CCOBSCKEBBCTL + ch * DDRIOCCC_CH_OFFSET,
551 0, 1 << 24);
552
553 mrc_alt_write_mask(DDRPHY,
554 CCCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
555 0x1f, 0x000ffff1);
556
557 mrc_alt_write_mask(DDRPHY,
558 CCRCOMPODT + ch * DDRIOCCC_CH_OFFSET,
559 (0x03 << 8) | (0x03 << 0), 0x00001f1f);
560
561 mrc_alt_write_mask(DDRPHY,
562 CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
563 (0x3 << 4) | (0x7 << 0), 0x7f);
564
565
566
567
568
569
570
571
572 mrc_alt_write_mask(DDRPHY,
573 DQVREFCH0 + ch * DDRCOMP_CH_OFFSET,
574 (0x08 << 24) | (0x03 << 16), 0x3f3f0000);
575
576 mrc_alt_write_mask(DDRPHY,
577 CMDVREFCH0 + ch * DDRCOMP_CH_OFFSET,
578 (0x0C << 24) | (0x03 << 16), 0x3f3f0000);
579
580 mrc_alt_write_mask(DDRPHY,
581 CLKVREFCH0 + ch * DDRCOMP_CH_OFFSET,
582 (0x0F << 24) | (0x03 << 16), 0x3f3f0000);
583
584 mrc_alt_write_mask(DDRPHY,
585 DQSVREFCH0 + ch * DDRCOMP_CH_OFFSET,
586 (0x08 << 24) | (0x03 << 16), 0x3f3f0000);
587
588 mrc_alt_write_mask(DDRPHY,
589 CTLVREFCH0 + ch * DDRCOMP_CH_OFFSET,
590 (0x0C << 24) | (0x03 << 16), 0x3f3f0000);
591
592
593 mrc_alt_write_mask(DDRPHY,
594 COMPEN1CH0 + ch * DDRCOMP_CH_OFFSET,
595 (1 << 19) | (1 << 17), 0xc00ac000);
596
597
598
599 mrc_alt_write_mask(DDRPHY,
600 DQVREFCH0 + ch * DDRCOMP_CH_OFFSET,
601 (0x32 << 8) | (0x03 << 0), 0x00003f3f);
602
603 mrc_alt_write_mask(DDRPHY,
604 DQSVREFCH0 + ch * DDRCOMP_CH_OFFSET,
605 (0x32 << 8) | (0x03 << 0), 0x00003f3f);
606
607 mrc_alt_write_mask(DDRPHY,
608 CLKVREFCH0 + ch * DDRCOMP_CH_OFFSET,
609 (0x0E << 8) | (0x05 << 0), 0x00003f3f);
610
611
612
613
614
615
616
617 temp = (0x0e << 16) | (0x0e << 12) | (0x08 << 8) |
618 (0x0b << 4) | (0x0b << 0);
619
620 mrc_alt_write_mask(DDRPHY,
621 DLYSELCH0 + ch * DDRCOMP_CH_OFFSET,
622 temp, 0x000fffff);
623
624 mrc_alt_write_mask(DDRPHY,
625 TCOVREFCH0 + ch * DDRCOMP_CH_OFFSET,
626 (0x05 << 16) | (0x05 << 8) | (0x05 << 0),
627 0x003f3f3f);
628
629 mrc_alt_write_mask(DDRPHY,
630 CCBUFODTCH0 + ch * DDRCOMP_CH_OFFSET,
631 (0x03 << 8) | (0x03 << 0),
632 0x00001f1f);
633
634 mrc_alt_write_mask(DDRPHY,
635 COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET,
636 0, 0xc0000100);
637
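			/*
			 * BACKUP_COMPS builds force fixed RCOMP override
			 * values for the DQ, DQS, CLK, CMD and CTL cells;
			 * otherwise only the TCO controls for DQ, DQS and
			 * CLK are overridden.
			 */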
638#ifdef BACKUP_COMPS
639
640
641 mrc_alt_write_mask(DDRPHY,
642 DQDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
643 (1 << 31) | (0x0a << 16),
644 0x801f0000);
645
646 mrc_alt_write_mask(DDRPHY,
647 DQDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
648 (1 << 31) | (0x0a << 16),
649 0x801f0000);
650
651 mrc_alt_write_mask(DDRPHY,
652 DQDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
653 (1 << 31) | (0x10 << 16),
654 0x801f0000);
655
656 mrc_alt_write_mask(DDRPHY,
657 DQDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
658 (1 << 31) | (0x10 << 16),
659 0x801f0000);
660
661 mrc_alt_write_mask(DDRPHY,
662 DQODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
663 (1 << 31) | (0x0b << 16),
664 0x801f0000);
665
666 mrc_alt_write_mask(DDRPHY,
667 DQODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
668 (1 << 31) | (0x0b << 16),
669 0x801f0000);
670
671 mrc_alt_write_mask(DDRPHY,
672 DQTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
673 1 << 31, 1 << 31);
674
675 mrc_alt_write_mask(DDRPHY,
676 DQTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
677 1 << 31, 1 << 31);
678
679
680
681 mrc_alt_write_mask(DDRPHY,
682 DQSDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
683 (1 << 31) | (0x0a << 16),
684 0x801f0000);
685
686 mrc_alt_write_mask(DDRPHY,
687 DQSDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
688 (1 << 31) | (0x0a << 16),
689 0x801f0000);
690
691 mrc_alt_write_mask(DDRPHY,
692 DQSDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
693 (1 << 31) | (0x10 << 16),
694 0x801f0000);
695
696 mrc_alt_write_mask(DDRPHY,
697 DQSDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
698 (1 << 31) | (0x10 << 16),
699 0x801f0000);
700
701 mrc_alt_write_mask(DDRPHY,
702 DQSODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
703 (1 << 31) | (0x0b << 16),
704 0x801f0000);
705
706 mrc_alt_write_mask(DDRPHY,
707 DQSODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
708 (1 << 31) | (0x0b << 16),
709 0x801f0000);
710
711 mrc_alt_write_mask(DDRPHY,
712 DQSTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
713 1 << 31, 1 << 31);
714
715 mrc_alt_write_mask(DDRPHY,
716 DQSTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
717 1 << 31, 1 << 31);
718
719
720
721 mrc_alt_write_mask(DDRPHY,
722 CLKDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
723 (1 << 31) | (0x0c << 16),
724 0x801f0000);
725
726 mrc_alt_write_mask(DDRPHY,
727 CLKDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
728 (1 << 31) | (0x0c << 16),
729 0x801f0000);
730
731 mrc_alt_write_mask(DDRPHY,
732 CLKDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
733 (1 << 31) | (0x07 << 16),
734 0x801f0000);
735
736 mrc_alt_write_mask(DDRPHY,
737 CLKDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
738 (1 << 31) | (0x07 << 16),
739 0x801f0000);
740
741 mrc_alt_write_mask(DDRPHY,
742 CLKODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
743 (1 << 31) | (0x0b << 16),
744 0x801f0000);
745
746 mrc_alt_write_mask(DDRPHY,
747 CLKODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
748 (1 << 31) | (0x0b << 16),
749 0x801f0000);
750
751 mrc_alt_write_mask(DDRPHY,
752 CLKTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
753 1 << 31, 1 << 31);
754
755 mrc_alt_write_mask(DDRPHY,
756 CLKTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
757 1 << 31, 1 << 31);
758
759
760
761 mrc_alt_write_mask(DDRPHY,
762 CMDDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
763 (1 << 31) | (0x0d << 16),
764 0x803f0000);
765
766 mrc_alt_write_mask(DDRPHY,
767 CMDDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
768 (1 << 31) | (0x0d << 16),
769 0x803f0000);
770
771 mrc_alt_write_mask(DDRPHY,
772 CMDDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
773 (1 << 31) | (0x0a << 16),
774 0x801f0000);
775
776 mrc_alt_write_mask(DDRPHY,
777 CMDDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
778 (1 << 31) | (0x0a << 16),
779 0x801f0000);
780
781
782
783 mrc_alt_write_mask(DDRPHY,
784 CTLDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
785 (1 << 31) | (0x0d << 16),
786 0x803f0000);
787
788 mrc_alt_write_mask(DDRPHY,
789 CTLDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
790 (1 << 31) | (0x0d << 16),
791 0x803f0000);
792
793 mrc_alt_write_mask(DDRPHY,
794 CTLDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
795 (1 << 31) | (0x0a << 16),
796 0x801f0000);
797
798 mrc_alt_write_mask(DDRPHY,
799 CTLDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
800 (1 << 31) | (0x0a << 16),
801 0x801f0000);
802#else
803
804
805 mrc_alt_write_mask(DDRPHY,
806 DQTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
807 (1 << 31) | (0x1f << 16),
808 0x801f0000);
809
810 mrc_alt_write_mask(DDRPHY,
811 DQTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
812 (1 << 31) | (0x1f << 16),
813 0x801f0000);
814
815
816
817 mrc_alt_write_mask(DDRPHY,
818 DQSTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
819 (1 << 31) | (0x1f << 16),
820 0x801f0000);
821
822 mrc_alt_write_mask(DDRPHY,
823 DQSTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
824 (1 << 31) | (0x1f << 16),
825 0x801f0000);
826
827
828
829 mrc_alt_write_mask(DDRPHY,
830 CLKTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
831 (1 << 31) | (0x1f << 16),
832 0x801f0000);
833
834 mrc_alt_write_mask(DDRPHY,
835 CLKTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
836 (1 << 31) | (0x1f << 16),
837 0x801f0000);
838#endif
839
840
841#ifdef BACKUP_WCMD
842 set_wcmd(ch, ddr_wcmd[PLATFORM_ID]);
843#else
844 set_wcmd(ch, ddr_wclk[PLATFORM_ID] + HALF_CLK);
845#endif
846
847 for (rk = 0; rk < NUM_RANKS; rk++) {
848 if (mrc_params->rank_enables & (1 << rk)) {
849 set_wclk(ch, rk, ddr_wclk[PLATFORM_ID]);
850#ifdef BACKUP_WCTL
851 set_wctl(ch, rk, ddr_wctl[PLATFORM_ID]);
852#else
853 set_wctl(ch, rk, ddr_wclk[PLATFORM_ID] + HALF_CLK);
854#endif
855 }
856 }
857 }
858 }
859
860
861
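	/*
	 * Program the analog compensation control registers shared by the
	 * channels (drive, ODT, delay and TCO cells for DQ/CMD/CLK/DQS/CTL)
	 * and the remaining global settings before releasing the PHY master
	 * reset below.
	 */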
862 mrc_alt_write_mask(DDRPHY, DQANADRVPUCTL, 1 << 30, 1 << 30);
863
864 mrc_alt_write_mask(DDRPHY, DQANADRVPDCTL, 1 << 30, 1 << 30);
865
866 mrc_alt_write_mask(DDRPHY, CMDANADRVPUCTL, 1 << 30, 1 << 30);
867
868 mrc_alt_write_mask(DDRPHY, CMDANADRVPDCTL, 1 << 30, 1 << 30);
869
870 mrc_alt_write_mask(DDRPHY, CLKANADRVPUCTL, 1 << 30, 1 << 30);
871
872 mrc_alt_write_mask(DDRPHY, CLKANADRVPDCTL, 1 << 30, 1 << 30);
873
874 mrc_alt_write_mask(DDRPHY, DQSANADRVPUCTL, 1 << 30, 1 << 30);
875
876 mrc_alt_write_mask(DDRPHY, DQSANADRVPDCTL, 1 << 30, 1 << 30);
877
878 mrc_alt_write_mask(DDRPHY, CTLANADRVPUCTL, 1 << 30, 1 << 30);
879
880 mrc_alt_write_mask(DDRPHY, CTLANADRVPDCTL, 1 << 30, 1 << 30);
881
882 mrc_alt_write_mask(DDRPHY, DQANAODTPUCTL, 1 << 30, 1 << 30);
883
884 mrc_alt_write_mask(DDRPHY, DQANAODTPDCTL, 1 << 30, 1 << 30);
885
886 mrc_alt_write_mask(DDRPHY, CLKANAODTPUCTL, 1 << 30, 1 << 30);
887
888 mrc_alt_write_mask(DDRPHY, CLKANAODTPDCTL, 1 << 30, 1 << 30);
889
890 mrc_alt_write_mask(DDRPHY, DQSANAODTPUCTL, 1 << 30, 1 << 30);
891
892 mrc_alt_write_mask(DDRPHY, DQSANAODTPDCTL, 1 << 30, 1 << 30);
893
894 mrc_alt_write_mask(DDRPHY, DQANADLYPUCTL, 1 << 30, 1 << 30);
895
896 mrc_alt_write_mask(DDRPHY, DQANADLYPDCTL, 1 << 30, 1 << 30);
897
898 mrc_alt_write_mask(DDRPHY, CMDANADLYPUCTL, 1 << 30, 1 << 30);
899
900 mrc_alt_write_mask(DDRPHY, CMDANADLYPDCTL, 1 << 30, 1 << 30);
901
902 mrc_alt_write_mask(DDRPHY, CLKANADLYPUCTL, 1 << 30, 1 << 30);
903
904 mrc_alt_write_mask(DDRPHY, CLKANADLYPDCTL, 1 << 30, 1 << 30);
905
906 mrc_alt_write_mask(DDRPHY, DQSANADLYPUCTL, 1 << 30, 1 << 30);
907
908 mrc_alt_write_mask(DDRPHY, DQSANADLYPDCTL, 1 << 30, 1 << 30);
909
910 mrc_alt_write_mask(DDRPHY, CTLANADLYPUCTL, 1 << 30, 1 << 30);
911
912 mrc_alt_write_mask(DDRPHY, CTLANADLYPDCTL, 1 << 30, 1 << 30);
913
914 mrc_alt_write_mask(DDRPHY, DQANATCOPUCTL, 1 << 30, 1 << 30);
915
916 mrc_alt_write_mask(DDRPHY, DQANATCOPDCTL, 1 << 30, 1 << 30);
917
918 mrc_alt_write_mask(DDRPHY, CLKANATCOPUCTL, 1 << 30, 1 << 30);
919
920 mrc_alt_write_mask(DDRPHY, CLKANATCOPDCTL, 1 << 30, 1 << 30);
921
922 mrc_alt_write_mask(DDRPHY, DQSANATCOPUCTL, 1 << 30, 1 << 30);
923
924 mrc_alt_write_mask(DDRPHY, DQSANATCOPDCTL, 1 << 30, 1 << 30);
925
926 mrc_alt_write_mask(DDRPHY, TCOCNTCTRL, 1, 3);
927
928 mrc_alt_write_mask(DDRPHY, CHNLBUFSTATIC,
929 (0x03 << 24) | (0x03 << 16), 0x1f1f0000);
930
931 mrc_alt_write_mask(DDRPHY, MSCNTR, 0x64, 0xff);
932 mrc_alt_write_mask(DDRPHY, LATCH1CTL, 0x1 << 28, 0x70000000);
933
934
935 mrc_alt_write_mask(DDRPHY, MASTERRSTN, 1, 1);
936
937
938 mrc_post_code(0x03, 0x11);
939
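	/*
	 * MDLL sequencing, step 1: set bit 13 in the DQ, ECC, CMD and
	 * clock-control MDLL control registers of each enabled channel.
	 */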
940 for (ch = 0; ch < NUM_CHANNELS; ch++) {
941 if (mrc_params->channel_enables & (1 << ch)) {
942
943 for (bl_grp = 0;
944 bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
945 bl_grp++) {
946 mrc_alt_write_mask(DDRPHY,
947 DQMDLLCTL +
948 bl_grp * DDRIODQ_BL_OFFSET +
949 ch * DDRIODQ_CH_OFFSET,
950 1 << 13,
951 1 << 13);
952 delay_n(3);
953 }
954
955
956 mrc_alt_write_mask(DDRPHY, ECCMDLLCTL,
957 1 << 13, 1 << 13);
958 delay_n(3);
959
960 mrc_alt_write_mask(DDRPHY,
961 CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
962 1 << 13, 1 << 13);
963 delay_n(3);
964
965 mrc_alt_write_mask(DDRPHY,
966 CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
967 1 << 13, 1 << 13);
968 delay_n(3);
969 }
970 }
971
972
973 mrc_post_code(0x03, 0x12);
974 delay_n(200);
975
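	/*
	 * MDLL sequencing, step 2: after the delay, set bit 17 in the DQ and
	 * ECC MDLL control registers and bit 18 in the CMD and clock-control
	 * ones.
	 */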
976 for (ch = 0; ch < NUM_CHANNELS; ch++) {
977 if (mrc_params->channel_enables & (1 << ch)) {
978
979 for (bl_grp = 0;
980 bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
981 bl_grp++) {
982 mrc_alt_write_mask(DDRPHY,
983 DQMDLLCTL +
984 bl_grp * DDRIODQ_BL_OFFSET +
985 ch * DDRIODQ_CH_OFFSET,
986 1 << 17,
987 1 << 17);
988 delay_n(50);
989 }
990
991
992 mrc_alt_write_mask(DDRPHY, ECCMDLLCTL,
993 1 << 17, 1 << 17);
994 delay_n(50);
995
996 mrc_alt_write_mask(DDRPHY,
997 CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
998 1 << 18, 1 << 18);
999 delay_n(50);
1000
1001 mrc_alt_write_mask(DDRPHY,
1002 CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
1003 1 << 18, 1 << 18);
1004 delay_n(50);
1005 }
1006 }
1007
1008
1009 mrc_post_code(0x03, 0x13);
1010 delay_n(100);
1011
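	/*
	 * Enable the DLL TX/RX paths: program DQDLLTXCTL/DQDLLRXCTL per byte
	 * lane group (optionally limited for 16-bit DDRIO), plus the ECC and
	 * CMD DLL TX controls.
	 */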
1012 for (ch = 0; ch < NUM_CHANNELS; ch++) {
1013 if (mrc_params->channel_enables & (1 << ch)) {
1014
1015 for (bl_grp = 0;
1016 bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
1017 bl_grp++) {
1018#ifdef FORCE_16BIT_DDRIO
1019 temp = (bl_grp &&
1020 (mrc_params->channel_width == X16)) ?
1021 0x11ff : 0xffff;
1022#else
1023 temp = 0xffff;
1024#endif
1025
1026 mrc_alt_write_mask(DDRPHY,
1027 DQDLLTXCTL +
1028 bl_grp * DDRIODQ_BL_OFFSET +
1029 ch * DDRIODQ_CH_OFFSET,
1030 temp, 0xffff);
1031 delay_n(3);
1032
1033 mrc_alt_write_mask(DDRPHY,
1034 DQDLLRXCTL +
1035 bl_grp * DDRIODQ_BL_OFFSET +
1036 ch * DDRIODQ_CH_OFFSET,
1037 0xf, 0xf);
1038 delay_n(3);
1039
1040 mrc_alt_write_mask(DDRPHY,
1041 B0OVRCTL +
1042 bl_grp * DDRIODQ_BL_OFFSET +
1043 ch * DDRIODQ_CH_OFFSET,
1044 0xf, 0xf);
1045 }
1046
1047
1048 temp = 0xffff;
1049 mrc_alt_write_mask(DDRPHY, ECCDLLTXCTL,
1050 temp, 0xffff);
1051 delay_n(3);
1052
1053
1054 mrc_alt_write_mask(DDRPHY,
1055 CMDDLLTXCTL + ch * DDRIOCCC_CH_OFFSET,
1056 temp, 0xffff);
1057 delay_n(3);
1058 }
1059 }
1060
1061
1062 mrc_post_code(0x03, 0x14);
1063
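	/*
	 * Final PHY step: program the clock-alignment registers, run an
	 * RCOMP calibration (poll CMPCTRL bit 0 until it clears), then
	 * re-enable the CMD fub bits that were cleared at the top of this
	 * function.
	 */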
1064 for (ch = 0; ch < NUM_CHANNELS; ch++) {
1065 if (mrc_params->channel_enables & (1 << ch)) {
1066
1067 for (bl_grp = 0;
1068 bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
1069 bl_grp++) {
1070
1071 mrc_alt_write_mask(DDRPHY,
1072 DQCLKALIGNREG2 +
1073 bl_grp * DDRIODQ_BL_OFFSET +
1074 ch * DDRIODQ_CH_OFFSET,
1075 bl_grp ? 3 : 1,
1076 0xf);
1077 }
1078
1079 mrc_alt_write_mask(DDRPHY,
1080 ECCCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET,
1081 0x2, 0xf);
1082 mrc_alt_write_mask(DDRPHY,
1083 CMDCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET,
1084 0x0, 0xf);
1085 mrc_alt_write_mask(DDRPHY,
1086 CCCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET,
1087 0x2, 0xf);
1088 mrc_alt_write_mask(DDRPHY,
1089 CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET,
1090 0x20, 0x30);
1091
1092
1093
1094
1095 mrc_alt_write_mask(DDRPHY,
1096 CMDCLKALIGNREG1 + ch * DDRIOCCC_CH_OFFSET,
1097 (0x18 << 16) | (0x10 << 8) |
1098 (0x8 << 2) | (0x1 << 0),
1099 0x007f7fff);
1100
1101 mrc_alt_write_mask(DDRPHY,
1102 CMDCLKALIGNREG2 + ch * DDRIOCCC_CH_OFFSET,
1103 (0x10 << 16) | (0x4 << 8) | (0x2 << 4),
1104 0x001f0ff0);
1105#ifdef HMC_TEST
1106
1107 mrc_alt_write_mask(DDRPHY,
1108 CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET,
1109 1 << 24, 1 << 24);
1110 while (msg_port_alt_read(DDRPHY,
1111 CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET) &
1112 (1 << 24))
1113 ;
1114#endif
1115
1116
1117 mrc_alt_write_mask(DDRPHY,
1118 CMDPTRREG + ch * DDRIOCCC_CH_OFFSET,
1119 1, 1);
1120
1121
1122
1123 mrc_alt_write_mask(DDRPHY,
1124 COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET,
1125 1 << 5, 1 << 5);
1126
1127 mrc_alt_write_mask(DDRPHY, CMPCTRL, 1, 1);
1128
1129 while (msg_port_alt_read(DDRPHY, CMPCTRL) & 1)
1130 ;
1131
1132 mrc_alt_write_mask(DDRPHY,
1133 COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET,
1134 ~(1 << 5), 1 << 5);
1135
1136
1137
1138
1139 mrc_alt_write_mask(DDRPHY,
1140 CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
1141 1 << 2, 1 << 2);
1142
1143
1144 mrc_alt_write_mask(DDRPHY,
1145 CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET,
1146 1 << 20, 1 << 20);
1147 }
1148 }
1149
1150 LEAVEFN();
1151}

/*
 * Take the DRAM out of reset and run the JEDEC initialisation sequence:
 * program MR2, MR3, MR1 and MR0, then issue ZQCL, for every enabled rank.
 */
1154void perform_jedec_init(struct mrc_params *mrc_params)
1155{
1156 uint8_t twr, wl, rank;
1157 uint32_t tck;
1158 u32 dtr0;
1159 u32 drp;
1160 u32 drmc;
1161 u32 mrs0_cmd = 0;
1162 u32 emrs1_cmd = 0;
1163 u32 emrs2_cmd = 0;
1164 u32 emrs3_cmd = 0;
1165
1166 ENTERFN();
1167
1168
1169 mrc_post_code(0x04, 0x00);
1170
1171
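	/*
	 * Toggle DDR3 RESET# via CCDDR3RESETCTL with the required 200us
	 * delay, then program the DRMC CKE mode bits and send a NOP to each
	 * enabled rank before the mode-register writes below.
	 */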
1172 mrc_alt_write_mask(DDRPHY, CCDDR3RESETCTL, 2, 0x102);
1173
1174
1175 delay_u(200);
1176
1177
1178 mrc_alt_write_mask(DDRPHY, CCDDR3RESETCTL, 0x100, 0x102);
1179
1180 dtr0 = msg_port_read(MEM_CTLR, DTR0);
1181
1182
1183
1184
1185
1186
1187 drp = msg_port_read(MEM_CTLR, DRP);
1188 drp &= 0x3;
1189
1190 drmc = msg_port_read(MEM_CTLR, DRMC);
1191 drmc &= 0xfffffffc;
1192 drmc |= (DRMC_CKEMODE | drp);
1193
1194 msg_port_write(MEM_CTLR, DRMC, drmc);
1195
1196 for (rank = 0; rank < NUM_RANKS; rank++) {
1197
1198 if ((mrc_params->rank_enables & (1 << rank)) == 0)
1199 continue;
1200
1201 dram_init_command(DCMD_NOP(rank));
1202 }
1203
1204 msg_port_write(MEM_CTLR, DRMC,
1205 (mrc_params->rd_odt_value == 0 ? DRMC_ODTMODE : 0));
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
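	/*
	 * Build the MR2 (EMRS2) command: CAS write latency (CWL) and the
	 * self-refresh temperature range.
	 */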
1217 emrs2_cmd |= (2 << 3);
1218 wl = 5 + mrc_params->ddr_speed;
1219 emrs2_cmd |= ((wl - 5) << 9);
1220 emrs2_cmd |= (mrc_params->sr_temp_range << 13);
1221
1222
1223
1224
1225
1226
1227
1228 emrs3_cmd |= (3 << 3);
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
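	/*
	 * Build the MR1 (EMRS1) command: DLL enabled, output drive strength
	 * from ron_value and Rtt_nom from rtt_nom_value; the address bits
	 * are saved in mrc_params->mrs1 for reuse after write levelling.
	 */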
1255 emrs1_cmd |= (1 << 3);
1256 emrs1_cmd &= ~(1 << 6);
1257
1258 if (mrc_params->ron_value == 0)
1259 emrs1_cmd |= (1 << 7);
1260 else
1261 emrs1_cmd &= ~(1 << 7);
1262
1263 if (mrc_params->rtt_nom_value == 0)
1264 emrs1_cmd |= (DDR3_EMRS1_RTTNOM_40 << 6);
1265 else if (mrc_params->rtt_nom_value == 1)
1266 emrs1_cmd |= (DDR3_EMRS1_RTTNOM_60 << 6);
1267 else if (mrc_params->rtt_nom_value == 2)
1268 emrs1_cmd |= (DDR3_EMRS1_RTTNOM_120 << 6);
1269
1270
1271 mrc_params->mrs1 = emrs1_cmd >> 6;
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
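	/*
	 * Build the MR0 command: CAS latency taken from the DTR0 tCL field,
	 * write recovery (tWR) recomputed from the clock period, plus DLL
	 * reset.
	 */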
1296 mrs0_cmd |= (1 << 14);
1297 mrs0_cmd |= (1 << 18);
1298 mrs0_cmd |= ((((dtr0 >> 12) & 7) + 1) << 10);
1299
1300 tck = t_ck[mrc_params->ddr_speed];
1301
1302 twr = MCEIL(15000, tck);
1303 mrs0_cmd |= ((twr - 4) << 15);
1304
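	/* Send MR2, MR3, MR1, MR0 and then ZQCL to every enabled rank */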
1305 for (rank = 0; rank < NUM_RANKS; rank++) {
1306
1307 if ((mrc_params->rank_enables & (1 << rank)) == 0)
1308 continue;
1309
1310 emrs2_cmd |= (rank << 22);
1311 dram_init_command(emrs2_cmd);
1312
1313 emrs3_cmd |= (rank << 22);
1314 dram_init_command(emrs3_cmd);
1315
1316 emrs1_cmd |= (rank << 22);
1317 dram_init_command(emrs1_cmd);
1318
1319 mrs0_cmd |= (rank << 22);
1320 dram_init_command(mrs0_cmd);
1321
1322 dram_init_command(DCMD_ZQCL(rank));
1323 }
1324
1325 LEAVEFN();
1326}

/* Set the DDR initialisation complete (IC) bit in DCO */
void set_ddr_init_complete(struct mrc_params *mrc_params)
{
	u32 dco;

	ENTERFN();

	dco = msg_port_read(MEM_CTLR, DCO);
	dco &= ~DCO_PMICTL;
	dco |= DCO_IC;
	msg_port_write(MEM_CTLR, DCO, dco);

	LEAVEFN();
}

/* Restore the saved training delay values from mrc_params->timings */
void restore_timings(struct mrc_params *mrc_params)
{
	uint8_t ch, rk, bl;
	const struct mrc_timings *mt = &mrc_params->timings;

	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		for (rk = 0; rk < NUM_RANKS; rk++) {
			for (bl = 0; bl < NUM_BYTE_LANES; bl++) {
				set_rcvn(ch, rk, bl, mt->rcvn[ch][rk][bl]);
				set_rdqs(ch, rk, bl, mt->rdqs[ch][rk][bl]);
				set_wdqs(ch, rk, bl, mt->wdqs[ch][rk][bl]);
				set_wdq(ch, rk, bl, mt->wdq[ch][rk][bl]);
				if (rk == 0) {
					/* Vref is programmed once per channel */
					set_vref(ch, bl, mt->vref[ch][bl]);
				}
			}
			set_wctl(ch, rk, mt->wctl[ch][rk]);
		}
		set_wcmd(ch, mt->wcmd[ch]);
	}
}

/* Program a default set of training values (mid-range RDQS and vref) */
void default_timings(struct mrc_params *mrc_params)
{
	uint8_t ch, rk, bl;

	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		for (rk = 0; rk < NUM_RANKS; rk++) {
			for (bl = 0; bl < NUM_BYTE_LANES; bl++) {
				set_rdqs(ch, rk, bl, 24);
				if (rk == 0) {
					/* Vref is programmed once per channel */
					set_vref(ch, bl, 32);
				}
			}
		}
	}
}

/*
 * RCVN (receive enable) calibration: for each enabled channel and rank,
 * locate the rising edge of the read DQS and place the receive-enable
 * delay accordingly for every byte lane.
 */
1408void rcvn_cal(struct mrc_params *mrc_params)
1409{
1410 uint8_t ch;
1411 uint8_t rk;
1412 uint8_t bl;
1413 uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
1414
1415#ifdef R2R_SHARING
1416
1417 uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
1418#ifndef BACKUP_RCVN
1419
1420 uint32_t num_ranks_enabled = 0;
1421#endif
1422#endif
1423
1424#ifdef BACKUP_RCVN
1425#else
1426 uint32_t temp;
1427
1428 uint32_t delay[NUM_BYTE_LANES];
1429 u32 dtr1, dtr1_save;
1430#endif
1431
1432 ENTERFN();
1433
1434
1435 mrc_post_code(0x05, 0x00);
1436
1437#ifndef BACKUP_RCVN
1438
1439 dtr1 = msg_port_read(MEM_CTLR, DTR1);
1440 dtr1_save = dtr1;
1441 dtr1 |= DTR1_TCCD_12CLK;
1442 msg_port_write(MEM_CTLR, DTR1, dtr1);
1443#endif
1444
1445#ifdef R2R_SHARING
1446
1447 memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
1448#endif
1449
1450
1451 for (ch = 0; ch < NUM_CHANNELS; ch++) {
1452 if (mrc_params->channel_enables & (1 << ch)) {
1453
1454 for (rk = 0; rk < NUM_RANKS; rk++) {
1455 if (mrc_params->rank_enables & (1 << rk)) {
1456
1457
1458
1459
1460 mrc_post_code(0x05, 0x10 + ((ch << 4) | rk));
1461
1462#ifdef BACKUP_RCVN
1463
1464 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++)
1465 set_rcvn(ch, rk, bl, ddr_rcvn[PLATFORM_ID]);
1466#else
1467
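				/*
				 * RCVN search: clear B01PTRCTL1 bit 8 during
				 * the search, seed each byte lane at five
				 * full clocks, find the DQS rising edge, add
				 * a quarter clock, then step back in
				 * full-clock units until DQS no longer
				 * samples high.
				 */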
1468 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl += 2) {
1469 mrc_alt_write_mask(DDRPHY,
1470 B01PTRCTL1 +
1471 (bl >> 1) * DDRIODQ_BL_OFFSET +
1472 ch * DDRIODQ_CH_OFFSET,
1473 0, 1 << 8);
1474 }
1475
1476 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1477
1478 delay[bl] = (4 + 1) * FULL_CLK;
1479
1480 set_rcvn(ch, rk, bl, delay[bl]);
1481 }
1482
1483
1484 find_rising_edge(mrc_params, delay, ch, rk, true);
1485
1486
1487 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1488 delay[bl] += QRTR_CLK;
1489 set_rcvn(ch, rk, bl, delay[bl]);
1490 }
1491
1492 do {
1493 temp = sample_dqs(mrc_params, ch, rk, true);
1494 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1495 if (temp & (1 << bl)) {
1496 if (delay[bl] >= FULL_CLK) {
1497 delay[bl] -= FULL_CLK;
1498 set_rcvn(ch, rk, bl, delay[bl]);
1499 } else {
1500
1501 training_message(ch, rk, bl);
1502 mrc_post_code(0xee, 0x50);
1503 }
1504 }
1505 }
1506 } while (temp & 0xff);
1507
1508#ifdef R2R_SHARING
1509
1510 num_ranks_enabled++;
1511
1512 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1513 delay[bl] += QRTR_CLK;
1514
1515 final_delay[ch][bl] += delay[bl];
1516
1517 set_rcvn(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);
1518 }
1519#else
1520
1521 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1522 delay[bl] += QRTR_CLK;
1523 set_rcvn(ch, rk, bl, delay[bl]);
1524 }
1525#endif
1526
1527
1528 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl += 2) {
1529 mrc_alt_write_mask(DDRPHY,
1530 B01PTRCTL1 +
1531 (bl >> 1) * DDRIODQ_BL_OFFSET +
1532 ch * DDRIODQ_CH_OFFSET,
1533 1 << 8, 1 << 8);
1534 }
1535#endif
1536 }
1537 }
1538 }
1539 }
1540
1541#ifndef BACKUP_RCVN
1542
1543 msg_port_write(MEM_CTLR, DTR1, dtr1_save);
1544#endif
1545
1546 LEAVEFN();
1547}

/*
 * Write levelling: use the JEDEC write-levelling mode to align WDQS to
 * the DRAM clock for each enabled channel and rank, then refine WDQS/WDQ
 * with the HTE coarse read/write test.
 */
1555void wr_level(struct mrc_params *mrc_params)
1556{
1557 uint8_t ch;
1558 uint8_t rk;
1559 uint8_t bl;
1560 uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
1561
1562#ifdef R2R_SHARING
1563
1564 uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
1565#ifndef BACKUP_WDQS
1566
1567 uint32_t num_ranks_enabled = 0;
1568#endif
1569#endif
1570
1571#ifdef BACKUP_WDQS
1572#else
1573
1574 bool all_edges_found;
1575
1576 uint32_t delay[NUM_BYTE_LANES];
1577
1578
1579
1580
1581
1582 uint32_t address;
1583 u32 dtr4, dtr4_save;
1584#endif
1585
1586 ENTERFN();
1587
1588
1589 mrc_post_code(0x06, 0x00);
1590
1591#ifdef R2R_SHARING
1592
1593 memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
1594#endif
1595
1596
1597 for (ch = 0; ch < NUM_CHANNELS; ch++) {
1598 if (mrc_params->channel_enables & (1 << ch)) {
1599
1600 for (rk = 0; rk < NUM_RANKS; rk++) {
1601 if (mrc_params->rank_enables & (1 << rk)) {
1602
1603
1604
1605
1606 mrc_post_code(0x06, 0x10 + ((ch << 4) | rk));
1607
1608#ifdef BACKUP_WDQS
1609 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1610 set_wdqs(ch, rk, bl, ddr_wdqs[PLATFORM_ID]);
1611 set_wdq(ch, rk, bl, ddr_wdqs[PLATFORM_ID] - QRTR_CLK);
1612 }
1613#else
1614
1615
1616
1617
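				/*
				 * JEDEC write levelling: precharge all, put
				 * the DRAM into write-levelling mode via MR1,
				 * disable MCU ODT, configure the DQ fubs for
				 * levelling and search for the CK edge with
				 * find_rising_edge().
				 */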
1618 dram_init_command(DCMD_PREA(rk));
1619
1620
1621
1622
1623
1624 dram_init_command(DCMD_MRS1(rk, 0x82));
1625
1626
1627
1628
1629
1630
1631 dtr4 = msg_port_read(MEM_CTLR, DTR4);
1632 dtr4_save = dtr4;
1633 dtr4 |= DTR4_ODTDIS;
1634 msg_port_write(MEM_CTLR, DTR4, dtr4);
1635
1636 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) {
1637
1638
1639
1640
1641 mrc_alt_write_mask(DDRPHY,
1642 DQCTL + DDRIODQ_BL_OFFSET * bl + DDRIODQ_CH_OFFSET * ch,
1643 0x10000154,
1644 0x100003fc);
1645 }
1646
1647
1648 mrc_alt_write_mask(DDRPHY,
1649 CCDDR3RESETCTL + DDRIOCCC_CH_OFFSET * ch,
1650 1 << 16, 1 << 16);
1651
1652
1653 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1654
1655
1656
1657
1658 delay[bl] = get_wclk(ch, rk);
1659
1660 set_wdqs(ch, rk, bl, delay[bl]);
1661 }
1662
1663
1664 find_rising_edge(mrc_params, delay, ch, rk, false);
1665
1666
1667 mrc_alt_write_mask(DDRPHY,
1668 CCDDR3RESETCTL + DDRIOCCC_CH_OFFSET * ch,
1669 0, 1 << 16);
1670
1671 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) {
1672
1673 mrc_alt_write_mask(DDRPHY,
1674 DQCTL + DDRIODQ_BL_OFFSET * bl + DDRIODQ_CH_OFFSET * ch,
1675 0x00000154,
1676 0x100003fc);
1677 }
1678
1679
1680 msg_port_write(MEM_CTLR, DTR4, dtr4_save);
1681
1682
1683
1684
1685
1686 dram_init_command(DCMD_MRS1(rk, mrc_params->mrs1));
1687
1688
1689
1690
1691
1692 dram_init_command(DCMD_PREA(rk));
1693
1694 mrc_post_code(0x06, 0x30 + ((ch << 4) | rk));
1695
1696
1697
1698
1699
1700
1701
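				/*
				 * Second pass: push WDQS out by a full clock
				 * (WDQ a quarter clock behind it) and use the
				 * HTE coarse read/write test to pull both
				 * back until every byte lane passes.
				 */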
1702 mrc_params->hte_setup = 1;
1703
1704
1705 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1706 delay[bl] = get_wdqs(ch, rk, bl) + FULL_CLK;
1707 set_wdqs(ch, rk, bl, delay[bl]);
1708
1709
1710
1711
1712 set_wdq(ch, rk, bl, (delay[bl] - QRTR_CLK));
1713 }
1714
1715
1716 address = get_addr(ch, rk);
1717 do {
1718 uint32_t coarse_result = 0x00;
1719 uint32_t coarse_result_mask = byte_lane_mask(mrc_params);
1720
1721 all_edges_found = true;
1722
1723 mrc_params->hte_setup = 1;
1724 coarse_result = check_rw_coarse(mrc_params, address);
1725
1726
1727 for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
1728 if (coarse_result & (coarse_result_mask << bl)) {
1729 all_edges_found = false;
1730 delay[bl] -= FULL_CLK;
1731 set_wdqs(ch, rk, bl, delay[bl]);
1732
1733 set_wdq(ch, rk, bl, delay[bl] - QRTR_CLK);
1734 }
1735 }
1736 } while (!all_edges_found);
1737
1738#ifdef R2R_SHARING
1739
1740 num_ranks_enabled++;
1741
1742 for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
1743 final_delay[ch][bl] += delay[bl];
1744 set_wdqs(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);
1745
1746 set_wdq(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled - QRTR_CLK);
1747 }
1748#endif
1749#endif
1750 }
1751 }
1752 }
1753 }
1754
1755 LEAVEFN();
1756}

/* Program the page-close timeout and enable precharge-all power down */
void prog_page_ctrl(struct mrc_params *mrc_params)
{
	u32 dpmc0;

	ENTERFN();

	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
	dpmc0 &= ~DPMC0_PCLSTO_MASK;
	dpmc0 |= (4 << 16);
	dpmc0 |= DPMC0_PREAPWDEN;
	msg_port_write(MEM_CTLR, DPMC0, dpmc0);

	LEAVEFN();
}

/*
 * Perform read training: sweep RDQS delay (x) against vref (y) to map
 * the 2-D read data eye for every channel, rank and byte lane, then set
 * RDQS and vref to the centre of the eye.
 */
1790void rd_train(struct mrc_params *mrc_params)
1791{
1792 uint8_t ch;
1793 uint8_t rk;
1794 uint8_t bl;
1795 uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
1796#ifdef BACKUP_RDQS
1797#else
1798 uint8_t side_x;
1799 uint8_t side_y;
1800
1801 uint8_t x_coordinate[2][2][NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
1802
1803 uint8_t y_coordinate[2][2][NUM_CHANNELS][NUM_BYTE_LANES];
1804
1805 uint8_t x_center[NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
1806
1807 uint8_t y_center[NUM_CHANNELS][NUM_BYTE_LANES];
1808 uint32_t address;
1809 uint32_t result;
1810 uint32_t bl_mask;
1811#ifdef R2R_SHARING
1812
1813 uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
1814
1815 uint32_t num_ranks_enabled = 0;
1816#endif
1817#endif
1818
1819
1820 mrc_post_code(0x07, 0x00);
1821
1822 ENTERFN();
1823
1824#ifdef BACKUP_RDQS
1825 for (ch = 0; ch < NUM_CHANNELS; ch++) {
1826 if (mrc_params->channel_enables & (1 << ch)) {
1827 for (rk = 0; rk < NUM_RANKS; rk++) {
1828 if (mrc_params->rank_enables & (1 << rk)) {
1829 for (bl = 0;
1830 bl < NUM_BYTE_LANES / bl_divisor;
1831 bl++) {
1832 set_rdqs(ch, rk, bl, ddr_rdqs[PLATFORM_ID]);
1833 }
1834 }
1835 }
1836 }
1837 }
1838#else
1839
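	/*
	 * The four eye corners start at the RDQS/VREF limits and are moved
	 * inwards until the HTE byte-lane check passes; the centre of the
	 * resulting rectangle is programmed at the end.
	 */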
1840 for (ch = 0; ch < NUM_CHANNELS; ch++) {
1841 if (mrc_params->channel_enables & (1 << ch)) {
1842 for (rk = 0; rk < NUM_RANKS; rk++) {
1843 if (mrc_params->rank_enables & (1 << rk)) {
1844 for (bl = 0;
1845 bl < NUM_BYTE_LANES / bl_divisor;
1846 bl++) {
1847
1848 x_coordinate[L][B][ch][rk][bl] = RDQS_MIN;
1849 x_coordinate[R][B][ch][rk][bl] = RDQS_MAX;
1850 x_coordinate[L][T][ch][rk][bl] = RDQS_MIN;
1851 x_coordinate[R][T][ch][rk][bl] = RDQS_MAX;
1852
1853 y_coordinate[L][B][ch][bl] = VREF_MIN;
1854 y_coordinate[R][B][ch][bl] = VREF_MIN;
1855 y_coordinate[L][T][ch][bl] = VREF_MAX;
1856 y_coordinate[R][T][ch][bl] = VREF_MAX;
1857 }
1858 }
1859 }
1860 }
1861 }
1862
1863
1864 bl_mask = byte_lane_mask(mrc_params);
1865 address = get_addr(0, 0);
1866
1867#ifdef R2R_SHARING
1868
1869 memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
1870#endif
1871
1872
1873 for (side_y = B; side_y <= T; side_y++) {
1874 for (side_x = L; side_x <= R; side_x++) {
1875 mrc_post_code(0x07, 0x10 + side_y * 2 + side_x);
1876
1877
1878 for (ch = 0; ch < NUM_CHANNELS; ch++) {
1879 if (mrc_params->channel_enables & (0x1 << ch)) {
1880 for (rk = 0; rk < NUM_RANKS; rk++) {
1881 if (mrc_params->rank_enables &
1882 (0x1 << rk)) {
1883
1884 for (bl = 0;
1885 bl < NUM_BYTE_LANES / bl_divisor;
1886 bl++) {
1887 set_rdqs(ch, rk, bl,
1888 x_coordinate[side_x][side_y][ch][rk][bl]);
1889 set_vref(ch, bl,
1890 y_coordinate[side_x][side_y][ch][bl]);
1891 }
1892
1893
1894 address = get_addr(ch, rk);
1895
1896
1897 mrc_params->hte_setup = 1;
1898
1899
1900 do {
1901
1902 result = check_bls_ex(mrc_params, address);
1903
1904
1905 if (result & 0xff) {
1906
1907 for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
1908 if (result &
1909 (bl_mask << bl)) {
1910
1911 if (side_x == L)
1912 x_coordinate[L][side_y][ch][rk][bl] += RDQS_STEP;
1913 else
1914 x_coordinate[R][side_y][ch][rk][bl] -= RDQS_STEP;
1915
1916
1917 if ((x_coordinate[L][side_y][ch][rk][bl] > (RDQS_MAX - MIN_RDQS_EYE)) ||
1918 (x_coordinate[R][side_y][ch][rk][bl] < (RDQS_MIN + MIN_RDQS_EYE)) ||
1919 (x_coordinate[L][side_y][ch][rk][bl] ==
1920 x_coordinate[R][side_y][ch][rk][bl])) {
1921
1922
1923
1924
1925 if (side_y == B)
1926 y_coordinate[side_x][B][ch][bl] += VREF_STEP;
1927 else
1928 y_coordinate[side_x][T][ch][bl] -= VREF_STEP;
1929
1930
1931 if ((y_coordinate[side_x][B][ch][bl] > (VREF_MAX - MIN_VREF_EYE)) ||
1932 (y_coordinate[side_x][T][ch][bl] < (VREF_MIN + MIN_VREF_EYE)) ||
1933 (y_coordinate[side_x][B][ch][bl] == y_coordinate[side_x][T][ch][bl])) {
1934
1935 training_message(ch, rk, bl);
1936 mrc_post_code(0xEE, 0x70 + side_y * 2 + side_x);
1937 } else {
1938
1939 set_vref(ch, bl, y_coordinate[side_x][side_y][ch][bl]);
1940
1941 x_coordinate[side_x][side_y][ch][rk][bl] =
1942 (side_x == L) ? RDQS_MIN : RDQS_MAX;
1943 }
1944 }
1945
1946
1947 set_rdqs(ch, rk, bl, x_coordinate[side_x][side_y][ch][rk][bl]);
1948 }
1949 }
1950 }
1951 } while (result & 0xff);
1952 }
1953 }
1954 }
1955 }
1956 }
1957 }
1958
1959 mrc_post_code(0x07, 0x20);
1960
1961
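	/*
	 * Compute the eye centres: average the left/right RDQS edges found
	 * for the top and bottom vref settings, and the top/bottom vref
	 * edges found for the left and right RDQS settings.
	 */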
1962 for (ch = 0; ch < NUM_CHANNELS; ch++) {
1963 if (mrc_params->channel_enables & (1 << ch)) {
1964 for (rk = 0; rk < NUM_RANKS; rk++) {
1965 if (mrc_params->rank_enables & (1 << rk)) {
1966 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
1967 uint32_t temp1;
1968 uint32_t temp2;
1969
1970
1971 DPF(D_INFO,
1972 "RDQS T/B eye rank%d lane%d : %d-%d %d-%d\n",
1973 rk, bl,
1974 x_coordinate[L][T][ch][rk][bl],
1975 x_coordinate[R][T][ch][rk][bl],
1976 x_coordinate[L][B][ch][rk][bl],
1977 x_coordinate[R][B][ch][rk][bl]);
1978
1979
1980 temp1 = (x_coordinate[R][T][ch][rk][bl] + x_coordinate[L][T][ch][rk][bl]) / 2;
1981
1982 temp2 = (x_coordinate[R][B][ch][rk][bl] + x_coordinate[L][B][ch][rk][bl]) / 2;
1983
1984 x_center[ch][rk][bl] = (uint8_t) ((temp1 + temp2) / 2);
1985
1986
1987 DPF(D_INFO,
1988 "VREF R/L eye lane%d : %d-%d %d-%d\n",
1989 bl,
1990 y_coordinate[R][B][ch][bl],
1991 y_coordinate[R][T][ch][bl],
1992 y_coordinate[L][B][ch][bl],
1993 y_coordinate[L][T][ch][bl]);
1994
1995
1996 temp1 = (y_coordinate[R][T][ch][bl] + y_coordinate[R][B][ch][bl]) / 2;
1997
1998 temp2 = (y_coordinate[L][T][ch][bl] + y_coordinate[L][B][ch][bl]) / 2;
1999
2000 y_center[ch][bl] = (uint8_t) ((temp1 + temp2) / 2);
2001 }
2002 }
2003 }
2004 }
2005 }
2006
2007#ifdef RX_EYE_CHECK
2008
2009 for (side_y = B; side_y <= T; side_y++) {
2010 for (side_x = L; side_x <= R; side_x++) {
2011 mrc_post_code(0x07, 0x30 + side_y * 2 + side_x);
2012
2013
2014 for (ch = 0; ch < NUM_CHANNELS; ch++) {
2015 if (mrc_params->channel_enables & (1 << ch)) {
2016 for (rk = 0; rk < NUM_RANKS; rk++) {
2017 if (mrc_params->rank_enables & (1 << rk)) {
2018 for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
2019 if (side_x == L)
2020 set_rdqs(ch, rk, bl, x_center[ch][rk][bl] - (MIN_RDQS_EYE / 2));
2021 else
2022 set_rdqs(ch, rk, bl, x_center[ch][rk][bl] + (MIN_RDQS_EYE / 2));
2023
2024 if (side_y == B)
2025 set_vref(ch, bl, y_center[ch][bl] - (MIN_VREF_EYE / 2));
2026 else
2027 set_vref(ch, bl, y_center[ch][bl] + (MIN_VREF_EYE / 2));
2028 }
2029 }
2030 }
2031 }
2032 }
2033
2034
2035 mrc_params->hte_setup = 1;
2036
2037
2038 if (check_bls_ex(mrc_params, address) & 0xff) {
2039
2040 mrc_post_code(0xee, 0x74 + side_x * 2 + side_y);
2041 }
2042 }
2043 }
2044#endif
2045
2046 mrc_post_code(0x07, 0x40);
2047
2048
2049 for (ch = 0; ch < NUM_CHANNELS; ch++) {
2050 if (mrc_params->channel_enables & (1 << ch)) {
2051 for (rk = 0; rk < NUM_RANKS; rk++) {
2052 if (mrc_params->rank_enables & (1 << rk)) {
2053#ifdef R2R_SHARING
2054
2055 num_ranks_enabled++;
2056#endif
2057 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
2058
2059#ifdef R2R_SHARING
2060 final_delay[ch][bl] += x_center[ch][rk][bl];
2061 set_rdqs(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);
2062#else
2063 set_rdqs(ch, rk, bl, x_center[ch][rk][bl]);
2064#endif
2065
2066 set_vref(ch, bl, y_center[ch][bl]);
2067 }
2068 }
2069 }
2070 }
2071 }
2072#endif
2073
2074 LEAVEFN();
2075}

/*
 * Perform write training: find the left and right edges of the write
 * data (WDQ) eye for every channel, rank and byte lane and set WDQ to
 * the centre.
 */
2088void wr_train(struct mrc_params *mrc_params)
2089{
2090 uint8_t ch;
2091 uint8_t rk;
2092 uint8_t bl;
2093 uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
2094#ifdef BACKUP_WDQ
2095#else
2096 uint8_t side;
2097 uint32_t temp;
2098
2099 uint32_t delay[2][NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
2100 uint32_t address;
2101 uint32_t result;
2102 uint32_t bl_mask;
2103#ifdef R2R_SHARING
2104
2105 uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
2106
2107 uint32_t num_ranks_enabled = 0;
2108#endif
2109#endif
2110
2111
2112 mrc_post_code(0x08, 0x00);
2113
2114 ENTERFN();
2115
2116#ifdef BACKUP_WDQ
2117 for (ch = 0; ch < NUM_CHANNELS; ch++) {
2118 if (mrc_params->channel_enables & (1 << ch)) {
2119 for (rk = 0; rk < NUM_RANKS; rk++) {
2120 if (mrc_params->rank_enables & (1 << rk)) {
2121 for (bl = 0;
2122 bl < NUM_BYTE_LANES / bl_divisor;
2123 bl++) {
2124 set_wdq(ch, rk, bl, ddr_wdq[PLATFORM_ID]);
2125 }
2126 }
2127 }
2128 }
2129 }
2130#else
2131
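	/*
	 * Start the left/right WDQ candidates a quarter clock either side of
	 * (WDQS - 1/4 clock), step each edge inwards while the HTE byte-lane
	 * check fails, and finally program WDQ to the midpoint of the
	 * passing window.
	 */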
2132 for (ch = 0; ch < NUM_CHANNELS; ch++) {
2133 if (mrc_params->channel_enables & (1 << ch)) {
2134 for (rk = 0; rk < NUM_RANKS; rk++) {
2135 if (mrc_params->rank_enables & (1 << rk)) {
2136 for (bl = 0;
2137 bl < NUM_BYTE_LANES / bl_divisor;
2138 bl++) {
2139
2140
2141
2142
2143
2144 temp = get_wdqs(ch, rk, bl) - QRTR_CLK;
2145 delay[L][ch][rk][bl] = temp - QRTR_CLK;
2146 delay[R][ch][rk][bl] = temp + QRTR_CLK;
2147 }
2148 }
2149 }
2150 }
2151 }
2152
2153
2154 bl_mask = byte_lane_mask(mrc_params);
2155 address = get_addr(0, 0);
2156
2157#ifdef R2R_SHARING
2158
2159 memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
2160#endif
2161
2162
2163
2164
2165
2166 for (side = L; side <= R; side++) {
2167 mrc_post_code(0x08, 0x10 + side);
2168
2169
2170 for (ch = 0; ch < NUM_CHANNELS; ch++) {
2171 if (mrc_params->channel_enables & (1 << ch)) {
2172 for (rk = 0; rk < NUM_RANKS; rk++) {
2173 if (mrc_params->rank_enables &
2174 (1 << rk)) {
2175 for (bl = 0;
2176 bl < NUM_BYTE_LANES / bl_divisor;
2177 bl++) {
2178 set_wdq(ch, rk, bl, delay[side][ch][rk][bl]);
2179 }
2180 }
2181 }
2182 }
2183 }
2184
2185
2186 for (ch = 0; ch < NUM_CHANNELS; ch++) {
2187 if (mrc_params->channel_enables & (1 << ch)) {
2188 for (rk = 0; rk < NUM_RANKS; rk++) {
2189 if (mrc_params->rank_enables &
2190 (1 << rk)) {
2191
2192 address = get_addr(ch, rk);
2193
2194
2195 mrc_params->hte_setup = 1;
2196
2197
2198 do {
2199
2200 result = check_bls_ex(mrc_params, address);
2201
2202 if (result & 0xff) {
2203
2204 for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
2205 if (result &
2206 (bl_mask << bl)) {
2207 if (side == L)
2208 delay[L][ch][rk][bl] += WDQ_STEP;
2209 else
2210 delay[R][ch][rk][bl] -= WDQ_STEP;
2211
2212
2213 if (delay[L][ch][rk][bl] != delay[R][ch][rk][bl]) {
2214
2215
2216
2217
2218 set_wdq(ch, rk, bl,
2219 delay[side][ch][rk][bl]);
2220 } else {
2221
2222
2223
2224
2225 training_message(ch, rk, bl);
2226 mrc_post_code(0xee, 0x80 + side);
2227 }
2228 }
2229 }
2230 }
2231
2232 } while (result & 0xff);
2233 }
2234 }
2235 }
2236 }
2237 }
2238
2239
2240 for (ch = 0; ch < NUM_CHANNELS; ch++) {
2241 if (mrc_params->channel_enables & (1 << ch)) {
2242 for (rk = 0; rk < NUM_RANKS; rk++) {
2243 if (mrc_params->rank_enables & (1 << rk)) {
2244#ifdef R2R_SHARING
2245
2246 num_ranks_enabled++;
2247#endif
2248 for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
2249 DPF(D_INFO,
2250 "WDQ eye rank%d lane%d : %d-%d\n",
2251 rk, bl,
2252 delay[L][ch][rk][bl],
2253 delay[R][ch][rk][bl]);
2254
2255 temp = (delay[R][ch][rk][bl] + delay[L][ch][rk][bl]) / 2;
2256
2257#ifdef R2R_SHARING
2258 final_delay[ch][bl] += temp;
2259 set_wdq(ch, rk, bl,
2260 final_delay[ch][bl] / num_ranks_enabled);
2261#else
2262 set_wdq(ch, rk, bl, temp);
2263#endif
2264 }
2265 }
2266 }
2267 }
2268 }
2269#endif
2270
2271 LEAVEFN();
2272}

/*
 * Save the final training results (RCVN, RDQS, WDQS, WDQ, vref, WCTL,
 * WCMD) into mrc_params->timings for reuse on later boots.
 */
void store_timings(struct mrc_params *mrc_params)
{
	uint8_t ch, rk, bl;
	struct mrc_timings *mt = &mrc_params->timings;

	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		for (rk = 0; rk < NUM_RANKS; rk++) {
			for (bl = 0; bl < NUM_BYTE_LANES; bl++) {
				mt->rcvn[ch][rk][bl] = get_rcvn(ch, rk, bl);
				mt->rdqs[ch][rk][bl] = get_rdqs(ch, rk, bl);
				mt->wdqs[ch][rk][bl] = get_wdqs(ch, rk, bl);
				mt->wdq[ch][rk][bl] = get_wdq(ch, rk, bl);

				if (rk == 0)
					mt->vref[ch][bl] = get_vref(ch, bl);
			}

			mt->wctl[ch][rk] = get_wctl(ch, rk);
		}

		mt->wcmd[ch] = get_wcmd(ch);
	}

	/* Record the DDR speed these timings were trained at */
	mt->ddr_speed = mrc_params->ddr_speed;
}

/* Enable the memory scrambler with a per-boot generated seed */
void enable_scrambling(struct mrc_params *mrc_params)
{
	uint32_t lfsr = 0;
	uint8_t i;

	if (mrc_params->scrambling_enables == 0)
		return;

	ENTERFN();

	/* The seed is stored with the other saved timing data */
	lfsr = mrc_params->timings.scrambler_seed;

	if (mrc_params->boot_mode == BM_COLD) {
		/*
		 * Generate a new seed on cold boot: take entropy from the
		 * TSC if no previous seed exists, otherwise advance the
		 * stored seed by clocking the LFSR 16 times.
		 */
		if (lfsr == 0) {
			lfsr = rdtsc() & 0x0fffffff;
		} else {
			for (i = 0; i < 16; i++)
				lfsr32(&lfsr);
		}

		/* Save the new seed */
		mrc_params->timings.scrambler_seed = lfsr;
	}

	/* Program the scrambler seed and the low scrambler registers */
	lfsr32(&lfsr);
	msg_port_write(MEM_CTLR, SCRMSEED, (lfsr & 0x0003ffff));

	for (i = 0; i < 2; i++)
		msg_port_write(MEM_CTLR, SCRMLO + i, (lfsr & 0xaaaaaaaa));

	LEAVEFN();
}

/* Configure the MCU scheduler and power management after training */
void prog_ddr_control(struct mrc_params *mrc_params)
{
	u32 dsch;
	u32 dpmc0;

	ENTERFN();

	/* Re-enable out-of-order scheduling and the new-request bypass */
	dsch = msg_port_read(MEM_CTLR, DSCH);
	dsch &= ~(DSCH_OOODIS | DSCH_OOOST3DIS | DSCH_NEWBYPDIS);
	msg_port_write(MEM_CTLR, DSCH, dsch);

	/* Power down, clock gating and page-close timeout */
	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
	dpmc0 &= ~DPMC0_DISPWRDN;
	dpmc0 |= (mrc_params->power_down_disable << 25);
	dpmc0 &= ~DPMC0_CLKGTDIS;
	dpmc0 &= ~DPMC0_PCLSTO_MASK;
	dpmc0 |= (4 << 16);
	dpmc0 |= DPMC0_PREAPWDEN;
	msg_port_write(MEM_CTLR, DPMC0, dpmc0);

	/* Program command tri-state behaviour in DPMC1 (bits [5:4] = 2) */
	mrc_write_mask(MEM_CTLR, DPMC1, 0x20, 0x30);

	LEAVEFN();
}

/* Program rank population, device width, density and address mode in DRP */
void prog_dra_drb(struct mrc_params *mrc_params)
{
	u32 drp;
	u32 dco;
	u8 density = mrc_params->params.density;

	ENTERFN();

	/* Clear the initialisation complete bit while DRP is reprogrammed */
	dco = msg_port_read(MEM_CTLR, DCO);
	dco &= ~DCO_IC;
	msg_port_write(MEM_CTLR, DCO, dco);

	drp = 0;
	if (mrc_params->rank_enables & 1)
		drp |= DRP_RKEN0;
	if (mrc_params->rank_enables & 2)
		drp |= DRP_RKEN1;
	if (mrc_params->dram_width == X16) {
		drp |= (1 << 4);	/* X16 device width, rank 0 */
		drp |= (1 << 9);	/* X16 device width, rank 1 */
	}

	/* DRP expects (density - 1); treat an unset density of 0 as 4 */
	if (density == 0)
		density = 4;

	drp |= ((density - 1) << 6);	/* rank 0 density */
	drp |= ((density - 1) << 11);	/* rank 1 density */

	/* Address mapping mode */
	drp |= (mrc_params->address_mode << 14);

	msg_port_write(MEM_CTLR, DRP, drp);

	/* Set the initialisation complete bit again */
	dco &= ~DCO_PMICTL;
	dco |= DCO_IC;
	msg_port_write(MEM_CTLR, DCO, dco);

	LEAVEFN();
}

/* Send the DRAM wake command */
void perform_wake(struct mrc_params *mrc_params)
{
	ENTERFN();

	dram_wake_command();

	LEAVEFN();
}

/* Program the refresh rate, ZQ calibration interval and dynamic power features */
void change_refresh_period(struct mrc_params *mrc_params)
{
	u32 drfc;
	u32 dcal;
	u32 dpmc0;

	ENTERFN();

	/* Enable refresh at the configured rate and clear the refresh debt */
	drfc = msg_port_read(MEM_CTLR, DRFC);
	drfc &= ~DRFC_TREFI_MASK;
	drfc |= (mrc_params->refresh_rate << 12);
	drfc |= DRFC_REFDBTCLR;
	msg_port_write(MEM_CTLR, DRFC, drfc);

	/* Periodic ZQ calibration interval */
	dcal = msg_port_read(MEM_CTLR, DCAL);
	dcal &= ~DCAL_ZQCINT_MASK;
	dcal |= (3 << 8);
	msg_port_write(MEM_CTLR, DCAL, dcal);

	/* Enable dynamic self-refresh and PHY clock gating */
	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
	dpmc0 |= (DPMC0_DYNSREN | DPMC0_ENPHYCLKGATE);
	msg_port_write(MEM_CTLR, DPMC0, dpmc0);

	LEAVEFN();
}

/*
 * Final PHY/DRAM housekeeping after training: enable periodic RCOMP,
 * program the ODT override according to rd_odt_value and issue a ZQ
 * calibration short to every enabled rank.
 */
void set_auto_refresh(struct mrc_params *mrc_params)
{
	uint32_t channel;
	uint32_t rank;
	uint32_t bl;
	uint32_t bl_divisor = 1;
	uint32_t temp;

	ENTERFN();

	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		if (mrc_params->channel_enables & (1 << channel)) {
			/* Enable periodic RCOMP */
			mrc_alt_write_mask(DDRPHY, CMPCTRL, 2, 2);

			/* ODT override depends on the configured read ODT */
			switch (mrc_params->rd_odt_value) {
			case 0:
				temp = 0x3f;
				break;
			default:
				temp = 0x00;
				break;
			}

			for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) {
				mrc_alt_write_mask(DDRPHY,
					B0OVRCTL + bl * DDRIODQ_BL_OFFSET +
					channel * DDRIODQ_CH_OFFSET,
					temp << 10,
					0x003ffc00);

				mrc_alt_write_mask(DDRPHY,
					B1OVRCTL + bl * DDRIODQ_BL_OFFSET +
					channel * DDRIODQ_CH_OFFSET,
					temp << 10,
					0x003ffc00);
			}

			/* Issue a ZQ calibration short to each enabled rank */
			for (rank = 0; rank < NUM_RANKS; rank++) {
				if (mrc_params->rank_enables & (1 << rank))
					dram_init_command(DCMD_ZQCS(rank));
			}
		}
	}

	clear_pointers();

	LEAVEFN();
}

/* Enable ECC: adjust the address map, enable the ECC logic and re-init memory */
void ecc_enable(struct mrc_params *mrc_params)
{
	u32 drp;
	u32 dsch;
	u32 ecc_ctrl;

	if (mrc_params->ecc_enables == 0)
		return;

	ENTERFN();

	/* Configuration required in ECC mode */
	drp = msg_port_read(MEM_CTLR, DRP);
	drp &= ~DRP_ADDRMAP_MASK;
	drp |= DRP_ADDRMAP_MAP1;
	drp |= DRP_PRI64BSPLITEN;
	msg_port_write(MEM_CTLR, DRP, drp);

	/* Disable new request bypass */
	dsch = msg_port_read(MEM_CTLR, DSCH);
	dsch |= DSCH_NEWBYPDIS;
	msg_port_write(MEM_CTLR, DSCH, dsch);

	/* Enable single/double bit error detection and check-bit generation */
	ecc_ctrl = (DECCCTRL_SBEEN | DECCCTRL_DBEEN | DECCCTRL_ENCBGEN);
	msg_port_write(MEM_CTLR, DECCCTRL, ecc_ctrl);

	/* 1/8 of the memory is consumed by the ECC check bits */
	mrc_params->mem_size -= mrc_params->mem_size / 8;

	/* Re-initialise memory through the HTE so the check bits are valid */
	if (mrc_params->boot_mode != BM_S3) {
		select_hte();
		hte_mem_init(mrc_params, MRC_MEM_INIT);
		select_mem_mgr();
	}

	LEAVEFN();
}

/* Run the HTE memory test and record the result in mrc_params->status */
void memory_test(struct mrc_params *mrc_params)
{
	uint32_t result = 0;

	ENTERFN();

	select_hte();
	result = hte_mem_init(mrc_params, MRC_MEM_TEST);
	select_mem_mgr();

	DPF(D_INFO, "Memory test result %x\n", result);
	mrc_params->status = ((result == 0) ? MRC_SUCCESS : MRC_E_MEMTEST);
	LEAVEFN();
}

/* Lock the memory controller configuration registers */
void lock_registers(struct mrc_params *mrc_params)
{
	u32 dco;

	ENTERFN();

	dco = msg_port_read(MEM_CTLR, DCO);
	dco &= ~(DCO_PMICTL | DCO_PMIDIS);
	dco |= (DCO_DRPLOCK | DCO_CPGCLOCK);
	msg_port_write(MEM_CTLR, DCO, dco);

	LEAVEFN();
}