1
2
3
4
5
6
7
8
9
10#include <common.h>
11#include <pci.h>
12#include <asm/arch/device.h>
13#include <asm/arch/mrc.h>
14#include <asm/arch/msg_port.h>
15#include "mrc_util.h"
16#include "hte.h"
17#include "smc.h"
18
19
/* t_ck: DDR clock period in picoseconds, indexed by mrc_params->ddr_speed */
static const uint32_t t_ck[3] = {
	2500,	/* speed index 0 (slowest supported grade) */
	1875,	/* speed index 1 */
	1500	/* speed index 2 */
};

/*
 * Per-platform default training delays, indexed by PLATFORM_ID
 * (see the set_wclk()/set_wcmd() call sites in ddrphy_init()).
 * The BACKUP_* tables are only compiled in when the corresponding
 * backup-value build option is defined; they replace the trained
 * values with fixed, previously-characterized delays.
 */
static const uint16_t ddr_wclk[] = {193, 158};
#ifdef BACKUP_WCTL
static const uint16_t ddr_wctl[] = {1, 217};
#endif
#ifdef BACKUP_WCMD
static const uint16_t ddr_wcmd[] = {1, 220};
#endif

#ifdef BACKUP_RCVN
static const uint16_t ddr_rcvn[] = {129, 498};
#endif

#ifdef BACKUP_WDQS
static const uint16_t ddr_wdqs[] = {65, 289};
#endif

#ifdef BACKUP_RDQS
static const uint8_t ddr_rdqs[] = {32, 24};
#endif

#ifdef BACKUP_WDQ
static const uint16_t ddr_wdq[] = {32, 257};
#endif
50
51
/*
 * Clear the self-refresh status by setting the PMSTS_DISR bit in the
 * memory controller's PMSTS register (write-one-to-set semantics per
 * the mask/value pair used here — TODO confirm against Quark MCU docs).
 */
void clear_self_refresh(struct mrc_params *mrc_params)
{
	ENTERFN();

	/* clear the PMSTS Channel Self Refresh bit */
	mrc_write_mask(MEM_CTLR, PMSTS, PMSTS_DISR, PMSTS_DISR);

	LEAVEFN();
}
61
62
/*
 * Program the memory controller's DRAM Timing Registers DTR0..DTR4.
 *
 * Timings are derived from mrc_params (CAS latency, tRAS/tWTR/tRRD/tFAW
 * in picoseconds) and the clock period table t_ck[].  All register
 * fields are encoded as "value minus a fixed bias" shifted into place;
 * the biases and shift positions follow the Quark memory controller
 * register layout (see the DTR*_*_MASK definitions in mrc headers).
 */
void prog_ddr_timing_control(struct mrc_params *mrc_params)
{
	uint8_t tcl, wl;
	uint8_t trp, trcd, tras, twr, twtr, trrd, trtp, tfaw;
	uint32_t tck;
	u32 dtr0, dtr1, dtr2, dtr3, dtr4;
	u32 tmp1, tmp2;

	ENTERFN();

	/* mrc_post_code(02, 00) */
	mrc_post_code(0x02, 0x00);

	dtr0 = msg_port_read(MEM_CTLR, DTR0);
	dtr1 = msg_port_read(MEM_CTLR, DTR1);
	dtr2 = msg_port_read(MEM_CTLR, DTR2);
	dtr3 = msg_port_read(MEM_CTLR, DTR3);
	dtr4 = msg_port_read(MEM_CTLR, DTR4);

	tck = t_ck[mrc_params->ddr_speed];	/* Clock in picoseconds */
	tcl = mrc_params->params.cl;		/* CAS latency in clocks */
	trp = tcl;	/* Per CAT MRC: tRP = tCL */
	trcd = tcl;	/* Per CAT MRC: tRCD = tCL */
	tras = MCEIL(mrc_params->params.ras, tck);

	/* tWR = 15000 ps = 15 ns, rounded up to whole clocks */
	twr = MCEIL(15000, tck);

	twtr = MCEIL(mrc_params->params.wtr, tck);
	trrd = MCEIL(mrc_params->params.rrd, tck);
	trtp = 4;	/* Valid for 800 and 1066, use 5 for 1333 */
	tfaw = MCEIL(mrc_params->params.faw, tck);

	wl = 5 + mrc_params->ddr_speed;	/* DDR3 write latency = 5/6/7 clocks */

	dtr0 &= ~DTR0_DFREQ_MASK;
	dtr0 |= mrc_params->ddr_speed;
	dtr0 &= ~DTR0_TCL_MASK;
	tmp1 = tcl - 5;			/* field is biased by 5 clocks */
	dtr0 |= ((tcl - 5) << 12);
	dtr0 &= ~DTR0_TRP_MASK;
	dtr0 |= ((trp - 5) << 4);	/* 5 bit DRAM Clock */
	dtr0 &= ~DTR0_TRCD_MASK;
	dtr0 |= ((trcd - 5) << 8);	/* 5 bit DRAM Clock */

	dtr1 &= ~DTR1_TWCL_MASK;
	tmp2 = wl - 3;			/* field is biased by 3 clocks */
	dtr1 |= (wl - 3);
	dtr1 &= ~DTR1_TWTP_MASK;
	dtr1 |= ((wl + 4 + twr - 14) << 8);	/* Write to Precharge */
	dtr1 &= ~DTR1_TRTP_MASK;
	dtr1 |= ((MMAX(trtp, 4) - 3) << 28);	/* 4 bit DRAM Clock */
	dtr1 &= ~DTR1_TRRD_MASK;
	dtr1 |= ((trrd - 4) << 24);		/* 4 bit DRAM Clock */
	dtr1 &= ~DTR1_TCMD_MASK;
	dtr1 |= (1 << 4);
	dtr1 &= ~DTR1_TRAS_MASK;
	dtr1 |= ((tras - 14) << 20);		/* 6 bit DRAM Clock */
	dtr1 &= ~DTR1_TFAW_MASK;
	dtr1 |= ((((tfaw + 1) >> 1) - 5) << 16);/* 4 bit DRAM Clock */

	/* Set 4 Clock CAS to CAS delay (multi-burst) */
	dtr1 &= ~DTR1_TCCD_MASK;

	dtr2 &= ~DTR2_TRRDR_MASK;
	dtr2 |= 1;
	dtr2 &= ~DTR2_TWWDR_MASK;
	dtr2 |= (2 << 8);
	dtr2 &= ~DTR2_TRWDR_MASK;
	dtr2 |= (2 << 16);

	dtr3 &= ~DTR3_TWRDR_MASK;
	dtr3 |= 2;
	dtr3 &= ~DTR3_TXXXX_MASK;
	dtr3 |= (2 << 4);

	dtr3 &= ~DTR3_TRWSR_MASK;
	/*
	 * NOTE(review): both branches below program the identical value;
	 * verify against the Quark MRC reference whether the 1066 case was
	 * meant to use a different bias before collapsing them.
	 */
	if (mrc_params->ddr_speed == DDRFREQ_800) {
		/* Extended Mode Register 2 / Read-to-Write same-rank delay */
		dtr3 |= ((tcl - 5 + 1) << 8);
	} else if (mrc_params->ddr_speed == DDRFREQ_1066) {
		dtr3 |= ((tcl - 5 + 1) << 8);
	}

	dtr3 &= ~DTR3_TWRSR_MASK;
	dtr3 |= ((4 + wl + twtr - 11) << 13);	/* Write-to-Read same rank */

	dtr3 &= ~DTR3_TXP_MASK;
	if (mrc_params->ddr_speed == DDRFREQ_800)
		dtr3 |= ((MMAX(0, 1 - 1)) << 22);
	else
		dtr3 |= ((MMAX(0, 2 - 1)) << 22);

	dtr4 &= ~DTR4_WRODTSTRT_MASK;
	dtr4 |= 1;
	dtr4 &= ~DTR4_WRODTSTOP_MASK;
	dtr4 |= (1 << 4);
	dtr4 &= ~DTR4_XXXX1_MASK;
	/* tmp1/tmp2 carry the biased tCL/tWCL computed above */
	dtr4 |= ((1 + tmp1 - tmp2 + 2) << 8);
	dtr4 &= ~DTR4_XXXX2_MASK;
	dtr4 |= ((1 + tmp1 - tmp2 + 2) << 12);
	dtr4 &= ~(DTR4_ODTDIS | DTR4_TRGSTRDIS);

	msg_port_write(MEM_CTLR, DTR0, dtr0);
	msg_port_write(MEM_CTLR, DTR1, dtr1);
	msg_port_write(MEM_CTLR, DTR2, dtr2);
	msg_port_write(MEM_CTLR, DTR3, dtr3);
	msg_port_write(MEM_CTLR, DTR4, dtr4);

	LEAVEFN();
}
174
175
/*
 * Configure the memory controller's decode/power-management/refresh
 * behavior prior to running the JEDEC initialization sequence:
 * power-down and dynamic self-refresh are disabled, out-of-order
 * scheduling is disabled, refresh and ZQ calibration intervals are
 * cleared, and the populated ranks are enabled in DRP.
 */
void prog_decode_before_jedec(struct mrc_params *mrc_params)
{
	u32 drp;
	u32 drfc;
	u32 dcal;
	u32 dsch;
	u32 dpmc0;

	ENTERFN();

	/* Disable power saving features */
	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
	dpmc0 |= (DPMC0_CLKGTDIS | DPMC0_DISPWRDN);
	dpmc0 &= ~DPMC0_PCLSTO_MASK;
	dpmc0 &= ~DPMC0_DYNSREN;
	msg_port_write(MEM_CTLR, DPMC0, dpmc0);

	/* Disable out of order transactions */
	dsch = msg_port_read(MEM_CTLR, DSCH);
	dsch |= (DSCH_OOODIS | DSCH_NEWBYPDIS);
	msg_port_write(MEM_CTLR, DSCH, dsch);

	/* Disable issuing the REF command (tREFI field cleared) */
	drfc = msg_port_read(MEM_CTLR, DRFC);
	drfc &= ~DRFC_TREFI_MASK;
	msg_port_write(MEM_CTLR, DRFC, drfc);

	/* Disable ZQ calibration short */
	dcal = msg_port_read(MEM_CTLR, DCAL);
	dcal &= ~DCAL_ZQCINT_MASK;
	dcal &= ~DCAL_SRXZQCL_MASK;
	msg_port_write(MEM_CTLR, DCAL, dcal);

	/*
	 * Enable rank 0/rank 1 as requested by rank_enables; all other
	 * DRP fields (address decode) are deliberately left at zero at
	 * this stage of the bring-up.
	 */
	drp = 0;
	if (mrc_params->rank_enables & 1)
		drp |= DRP_RKEN0;
	if (mrc_params->rank_enables & 2)
		drp |= DRP_RKEN1;
	msg_port_write(MEM_CTLR, DRP, drp);

	LEAVEFN();
}
222
223
224
225
226
227
228
229
/*
 * Assert the cold-wake sequence toward the DRAM: set DRMC_COLDWAKE,
 * issue the wake command, then program the final DRMC ODT mode
 * (ODTMODE only when read ODT is disabled, rd_odt_value == 0).
 */
void perform_ddr_reset(struct mrc_params *mrc_params)
{
	ENTERFN();

	/* Set COLDWAKE bit before sending the WAKE message */
	mrc_write_mask(MEM_CTLR, DRMC, DRMC_COLDWAKE, DRMC_COLDWAKE);

	/* Send wake command to DUNIT (MUST be done before JEDEC) */
	dram_wake_command();

	/* Set default value for DRMC (full register overwrite) */
	msg_port_write(MEM_CTLR, DRMC,
		       mrc_params->rd_odt_value == 0 ? DRMC_ODTMODE : 0);

	LEAVEFN();
}
246
247
248
249
250
251
/*
 * Initialize the DDR PHY: per byte-lane group and per channel register
 * setup, RCOMP/VREF programming, MDLL enable and lock, DLL/clock
 * alignment, and the initial RCOMP calibration cycle.
 *
 * The sequence is order-sensitive hardware bring-up; each phase is
 * bracketed by an mrc_post_code(0x03, ...) progress marker.  All
 * magic constants are DDR PHY register field values taken from the
 * Quark MRC reference — do not alter them without the PHY datasheet.
 */
void ddrphy_init(struct mrc_params *mrc_params)
{
	uint32_t temp;
	uint8_t ch;		/* channel counter */
	uint8_t rk;		/* rank counter */
	uint8_t bl_grp;		/* byte-lane-group counter (2 lanes/group) */
	uint8_t bl_divisor = 1;	/* byte-lane divisor (fixed at 1 here) */
	/* For DDR3: speed index 0/1/2/3 selects per-speed delay values */
	uint8_t speed = mrc_params->ddr_speed & 3;
	uint8_t cas;
	uint8_t cwl;

	ENTERFN();

	cas = mrc_params->params.cl;		/* CAS latency (clocks) */
	cwl = 5 + mrc_params->ddr_speed;	/* CAS write latency */

	/* ddrphy_init starts */
	mrc_post_code(0x03, 0x00);

	/*
	 * Pre-reset per-channel command-fabric setup: clear the
	 * CMDPMCONFIG0/CMDCFGREG0/CMDPTRREG control bits (mask-and-clear:
	 * value ~(1<<n) with mask (1<<n) leaves the bit cleared).
	 */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* Initialization complete/clock-gate control */
			mrc_alt_write_mask(DDRPHY,
				CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET,
				~(1 << 20), 1 << 20);

			mrc_alt_write_mask(DDRPHY,
				CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
				~(1 << 2), 1 << 2);

			mrc_alt_write_mask(DDRPHY,
				CMDPTRREG + ch * DDRIOCCC_CH_OFFSET,
				~(1 << 0), 1 << 0);
		}
	}

	/* Put PHY in reset (MASTERRSTN = 0) */
	mrc_alt_write_mask(DDRPHY, MASTERRSTN, 0, 1);

	/* Phase: per-lane-group DQ module programming */
	mrc_post_code(0x03, 0x10);
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* DQ initialization, one pass per byte-lane group */
			for (bl_grp = 0;
			     bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
			     bl_grp++) {
				/* Analog observability, only 1 per module */
				mrc_alt_write_mask(DDRPHY,
					DQOBSCKEBBCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					bl_grp ? 0 : (1 << 22), 1 << 22);

				/*
				 * Read ODT termination strength select.
				 * NOTE(review): every case programs 0x3;
				 * confirm whether per-value strengths were
				 * intended before simplifying this switch.
				 */
				switch (mrc_params->rd_odt_value) {
				case 1:
					temp = 0x3;
					break;
				case 2:
					temp = 0x3;
					break;
				case 3:
					temp = 0x3;
					break;
				default:
					temp = 0x3;
					break;
				}

				/* ODT strength into B0/B1 RX I/O buffers */
				mrc_alt_write_mask(DDRPHY,
					B0RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp << 5, 0x60);

				mrc_alt_write_mask(DDRPHY,
					B1RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp << 5, 0x60);

				/* Per-speed latency offsets (4 x 8-bit
				 * fields, all derived from CAS latency) */
				temp = (cas << 24) | (cas << 16) |
					(cas << 8) | (cas << 0);
				switch (speed) {
				case 0:
					temp -= 0x01010101;
					break;
				case 1:
					temp -= 0x02020202;
					break;
				case 2:
					temp -= 0x03030303;
					break;
				case 3:
					temp -= 0x04040404;
					break;
				}

				/* Latency control for both lanes in group */
				mrc_alt_write_mask(DDRPHY,
					B01LATCTL1 +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0x1f1f1f1f);
				switch (speed) {
				/* Per-speed ON duration values */
				case 0:
					temp = (0x06 << 16) | (0x07 << 8);
					break;
				case 1:
					temp = (0x07 << 16) | (0x08 << 8);
					break;
				case 2:
					temp = (0x09 << 16) | (0x0a << 8);
					break;
				case 3:
					temp = (0x0a << 16) | (0x0b << 8);
					break;
				}

				/* ON duration for B0 and B1 */
				mrc_alt_write_mask(DDRPHY,
					B0ONDURCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0x003f3f00);

				mrc_alt_write_mask(DDRPHY,
					B1ONDURCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0x003f3f00);

				switch (mrc_params->rd_odt_value) {
				case 0:
					/* override both ODT legs off */
					temp = (0x3f << 16) | (0x3f << 10);
					break;
				default:
					/* ODT partially enabled */
					temp = (0x3f << 16) | (0x2a << 10);
					break;
				}

				/* Override controls for B0 and B1 */
				mrc_alt_write_mask(DDRPHY,
					B0OVRCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0x003ffc00);

				mrc_alt_write_mask(DDRPHY,
					B1OVRCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0x003ffc00);

				/*
				 * Program read/write latency fields from
				 * CAS / CAS-write latency with fixed biases
				 * (+7 / -4 / -2 per the PHY field layout).
				 */
				mrc_alt_write_mask(DDRPHY,
					B0LATCTL0 +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					((cas + 7) << 16) | ((cas - 4) << 8) |
					((cwl - 2) << 0), 0x003f1f1f);
				mrc_alt_write_mask(DDRPHY,
					B1LATCTL0 +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					((cas + 7) << 16) | ((cas - 4) << 8) |
					((cwl - 2) << 0), 0x003f1f1f);

				/* Clear RX buffer control bits 0 and 7 */
				mrc_alt_write_mask(DDRPHY,
					B0RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					0, 0x81);
				mrc_alt_write_mask(DDRPHY,
					B1RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					0, 0x81);

				/* DQ/pointer control bits set */
				mrc_alt_write_mask(DDRPHY,
					DQCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					1 << 16, 1 << 16);
				mrc_alt_write_mask(DDRPHY,
					B01PTRCTL1 +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					1 << 8, 1 << 8);

				/* VREF control for B0 and B1
				 * (code 0x03 in bits [7:2], enables clear) */
				mrc_alt_write_mask(DDRPHY,
					B0VREFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					(0x03 << 2) | (0x0 << 1) | (0x0 << 0),
					0xff);

				mrc_alt_write_mask(DDRPHY,
					B1VREFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					(0x03 << 2) | (0x0 << 1) | (0x0 << 0),
					0xff);

				/* Clear RX buffer bit 4 on both lanes */
				mrc_alt_write_mask(DDRPHY,
					B0RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					0, 0x10);

				mrc_alt_write_mask(DDRPHY,
					B1RXIOBUFCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					0, 0x10);
			}

			/* CLKEBB observability off */
			mrc_alt_write_mask(DDRPHY,
				CMDOBSCKEBBCTL + ch * DDRIOCCC_CH_OFFSET,
				0, 1 << 23);

			/* Clear command config enable bits */
			mrc_alt_write_mask(DDRPHY,
				CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
				0, 0x03);

			/* Command RCOMP/ODT codes */
			mrc_alt_write_mask(DDRPHY,
				CMDRCOMPODT + ch * DDRIOCCC_CH_OFFSET,
				(0x03 << 5) | (0x03 << 0), 0x3ff);

			/*
			 * Command power-management delay registers: REG4..0
			 * are saturated (all ones) except REG3 which carries
			 * the fixed pattern 0xfffff616 from the reference MRC.
			 */
			mrc_alt_write_mask(DDRPHY,
				CMDPMDLYREG4 + ch * DDRIOCCC_CH_OFFSET,
				0xffffffff, 0xffffffff);

			mrc_alt_write_mask(DDRPHY,
				CMDPMDLYREG3 + ch * DDRIOCCC_CH_OFFSET,
				0xfffff616, 0xffffffff);

			mrc_alt_write_mask(DDRPHY,
				CMDPMDLYREG2 + ch * DDRIOCCC_CH_OFFSET,
				0xffffffff, 0xffffffff);

			mrc_alt_write_mask(DDRPHY,
				CMDPMDLYREG1 + ch * DDRIOCCC_CH_OFFSET,
				0xffffffff, 0xffffffff);

			mrc_alt_write_mask(DDRPHY,
				CMDPMDLYREG0 + ch * DDRIOCCC_CH_OFFSET,
				0xffffffff, 0xffffffff);

			mrc_alt_write_mask(DDRPHY,
				CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET,
				(0x6 << 8) | (0x1 << 6) | (0x4 << 0),
				0xffe00f4f);

			/* Command MDLL control */
			mrc_alt_write_mask(DDRPHY,
				CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				(0x3 << 4) | (0x7 << 0), 0x7f);

			/* Clock-control module: observability off, config */
			mrc_alt_write_mask(DDRPHY,
				CCOBSCKEBBCTL + ch * DDRIOCCC_CH_OFFSET,
				0, 1 << 24);

			mrc_alt_write_mask(DDRPHY,
				CCCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
				0x1f, 0x000ffff1);

			mrc_alt_write_mask(DDRPHY,
				CCRCOMPODT + ch * DDRIOCCC_CH_OFFSET,
				(0x03 << 8) | (0x03 << 0), 0x00001f1f);

			mrc_alt_write_mask(DDRPHY,
				CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				(0x3 << 4) | (0x7 << 0), 0x7f);

			/*
			 * COMP (RCOMP/VREF) initial settings.
			 * Upper halves (bits [31:16]) hold the RCOMP VREF
			 * codes per signal group; lower halves programmed
			 * further below hold the slew-rate VREF codes.
			 */
			mrc_alt_write_mask(DDRPHY,
				DQVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x08 << 24) | (0x03 << 16), 0x3f3f0000);

			mrc_alt_write_mask(DDRPHY,
				CMDVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x0C << 24) | (0x03 << 16), 0x3f3f0000);

			mrc_alt_write_mask(DDRPHY,
				CLKVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x0F << 24) | (0x03 << 16), 0x3f3f0000);

			mrc_alt_write_mask(DDRPHY,
				DQSVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x08 << 24) | (0x03 << 16), 0x3f3f0000);

			mrc_alt_write_mask(DDRPHY,
				CTLVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x0C << 24) | (0x03 << 16), 0x3f3f0000);

			/* COMP enables */
			mrc_alt_write_mask(DDRPHY,
				COMPEN1CH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 19) | (1 << 17), 0xc00ac000);

			/* Slew-rate VREF codes (low halves) */
			mrc_alt_write_mask(DDRPHY,
				DQVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x32 << 8) | (0x03 << 0), 0x00003f3f);

			mrc_alt_write_mask(DDRPHY,
				DQSVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x32 << 8) | (0x03 << 0), 0x00003f3f);

			mrc_alt_write_mask(DDRPHY,
				CLKVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x0E << 8) | (0x05 << 0), 0x00003f3f);

			/*
			 * Delay-select codes for the five signal groups
			 * (one 4-bit nibble each), then TCO VREF, CC buffer
			 * ODT, and COMP enable-0 clear.
			 */
			temp = (0x0e << 16) | (0x0e << 12) | (0x08 << 8) |
				(0x0b << 4) | (0x0b << 0);

			mrc_alt_write_mask(DDRPHY,
				DLYSELCH0 + ch * DDRCOMP_CH_OFFSET,
				temp, 0x000fffff);

			mrc_alt_write_mask(DDRPHY,
				TCOVREFCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x05 << 16) | (0x05 << 8) | (0x05 << 0),
				0x003f3f3f);

			mrc_alt_write_mask(DDRPHY,
				CCBUFODTCH0 + ch * DDRCOMP_CH_OFFSET,
				(0x03 << 8) | (0x03 << 0),
				0x00001f1f);

			mrc_alt_write_mask(DDRPHY,
				COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET,
				0, 0xc0000100);

#ifdef BACKUP_COMPS
			/*
			 * Bypass the RCOMP state machine entirely and force
			 * fixed, previously-characterized compensation codes
			 * (bit 31 = override enable, bits [20:16] = code).
			 */
			/* DQ COMP overrides */
			mrc_alt_write_mask(DDRPHY,
				DQDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x10 << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x10 << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);

			mrc_alt_write_mask(DDRPHY,
				DQTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);

			/* DQS COMP overrides */
			mrc_alt_write_mask(DDRPHY,
				DQSDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQSDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQSDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x10 << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQSDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x10 << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQSODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQSODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQSTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);

			mrc_alt_write_mask(DDRPHY,
				DQSTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);

			/* CLK COMP overrides */
			mrc_alt_write_mask(DDRPHY,
				CLKDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0c << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				CLKDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0c << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				CLKDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x07 << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				CLKDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x07 << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				CLKODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				CLKODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0b << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				CLKTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);

			mrc_alt_write_mask(DDRPHY,
				CLKTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 31, 1 << 31);

			/* CMD COMP overrides (wider code field, 6 bits) */
			mrc_alt_write_mask(DDRPHY,
				CMDDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0d << 16),
				0x803f0000);

			mrc_alt_write_mask(DDRPHY,
				CMDDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0d << 16),
				0x803f0000);

			mrc_alt_write_mask(DDRPHY,
				CMDDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				CMDDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);

			/* CTL COMP overrides */
			mrc_alt_write_mask(DDRPHY,
				CTLDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0d << 16),
				0x803f0000);

			mrc_alt_write_mask(DDRPHY,
				CTLDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0d << 16),
				0x803f0000);

			mrc_alt_write_mask(DDRPHY,
				CTLDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				CTLDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x0a << 16),
				0x801f0000);
#else
			/*
			 * Normal path: only the TCO compensation codes are
			 * forced (override bit 31 + code 0x1f); the rest are
			 * left to the RCOMP state machine.
			 */
			/* DQ TCO */
			mrc_alt_write_mask(DDRPHY,
				DQTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);

			/* DQS TCO */
			mrc_alt_write_mask(DDRPHY,
				DQSTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				DQSTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);

			/* CLK TCO */
			mrc_alt_write_mask(DDRPHY,
				CLKTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);

			mrc_alt_write_mask(DDRPHY,
				CLKTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET,
				(1 << 31) | (0x1f << 16),
				0x801f0000);
#endif

			/* Program write-command delay (backup or derived) */
#ifdef BACKUP_WCMD
			set_wcmd(ch, ddr_wcmd[PLATFORM_ID]);
#else
			set_wcmd(ch, ddr_wclk[PLATFORM_ID] + HALF_CLK);
#endif

			/* Per-rank write clock / write control delays */
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					set_wclk(ch, rk, ddr_wclk[PLATFORM_ID]);
#ifdef BACKUP_WCTL
					set_wctl(ch, rk, ddr_wctl[PLATFORM_ID]);
#else
					set_wctl(ch, rk, ddr_wclk[PLATFORM_ID] + HALF_CLK);
#endif
				}
			}
		}
	}

	/*
	 * Enable the shared analog compensation cells (bit 30 of each
	 * *ANA* register) for every signal group: DRV, ODT, DLY and TCO
	 * for DQ/DQS/CMD/CLK/CTL as applicable.
	 */
	mrc_alt_write_mask(DDRPHY, DQANADRVPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQANADRVPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CMDANADRVPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CMDANADRVPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CLKANADRVPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CLKANADRVPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQSANADRVPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQSANADRVPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CTLANADRVPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CTLANADRVPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQANAODTPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQANAODTPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CLKANAODTPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CLKANAODTPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQSANAODTPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQSANAODTPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQANADLYPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQANADLYPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CMDANADLYPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CMDANADLYPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CLKANADLYPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CLKANADLYPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQSANADLYPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQSANADLYPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CTLANADLYPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CTLANADLYPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQANATCOPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQANATCOPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CLKANATCOPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, CLKANATCOPDCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQSANATCOPUCTL, 1 << 30, 1 << 30);

	mrc_alt_write_mask(DDRPHY, DQSANATCOPDCTL, 1 << 30, 1 << 30);

	/* TCO counter control and static channel buffer settings */
	mrc_alt_write_mask(DDRPHY, TCOCNTCTRL, 1, 3);

	mrc_alt_write_mask(DDRPHY, CHNLBUFSTATIC,
		(0x03 << 24) | (0x03 << 16), 0x1f1f0000);

	/* Millisecond counter load (0x64 = 100) and latch control */
	mrc_alt_write_mask(DDRPHY, MSCNTR, 0x64, 0xff);
	mrc_alt_write_mask(DDRPHY, LATCH1CTL, 0x1 << 28, 0x70000000);

	/* Take PHY out of reset (MASTERRSTN = 1) */
	mrc_alt_write_mask(DDRPHY, MASTERRSTN, 1, 1);

	/* Phase: enable MDLLs (bit 13) with short settle delays */
	mrc_post_code(0x03, 0x11);

	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* Enable DQ MDLLs */
			for (bl_grp = 0;
			     bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
			     bl_grp++) {
				mrc_alt_write_mask(DDRPHY,
					DQMDLLCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					1 << 13,
					1 << 13);
				delay_n(3);
			}

			/* Enable ECC MDLL */
			mrc_alt_write_mask(DDRPHY, ECCMDLLCTL,
				1 << 13, 1 << 13);
			delay_n(3);

			mrc_alt_write_mask(DDRPHY,
				CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				1 << 13, 1 << 13);
			delay_n(3);

			mrc_alt_write_mask(DDRPHY,
				CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				1 << 13, 1 << 13);
			delay_n(3);
		}
	}

	/* Phase: after MDLL lock wait, release further MDLL controls */
	mrc_post_code(0x03, 0x12);
	delay_n(200);

	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* DQ modules use bit 17; CMD/CC modules bit 18 */
			for (bl_grp = 0;
			     bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
			     bl_grp++) {
				mrc_alt_write_mask(DDRPHY,
					DQMDLLCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					1 << 17,
					1 << 17);
				delay_n(50);
			}

			/* ECC */
			mrc_alt_write_mask(DDRPHY, ECCMDLLCTL,
				1 << 17, 1 << 17);
			delay_n(50);

			mrc_alt_write_mask(DDRPHY,
				CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				1 << 18, 1 << 18);
			delay_n(50);

			mrc_alt_write_mask(DDRPHY,
				CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET,
				1 << 18, 1 << 18);
			delay_n(50);
		}
	}

	/* Phase: enable TX/RX DLLs per lane */
	mrc_post_code(0x03, 0x13);
	delay_n(100);

	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* Enable TX DLL per lane group */
			for (bl_grp = 0;
			     bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
			     bl_grp++) {
#ifdef FORCE_16BIT_DDRIO
				/* Upper lanes masked off in x16 mode */
				temp = (bl_grp &&
					(mrc_params->channel_width == X16)) ?
					0x11ff : 0xffff;
#else
				temp = 0xffff;
#endif
				mrc_alt_write_mask(DDRPHY,
					DQDLLTXCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					temp, 0xffff);
				delay_n(3);

				/* Enable RX DLL */
				mrc_alt_write_mask(DDRPHY,
					DQDLLRXCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					0xf, 0xf);
				delay_n(3);

				/* Override enables in low nibble */
				mrc_alt_write_mask(DDRPHY,
					B0OVRCTL +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					0xf, 0xf);
			}

			/* ECC and command TX DLLs */
			temp = 0xffff;
			mrc_alt_write_mask(DDRPHY, ECCDLLTXCTL,
				temp, 0xffff);
			delay_n(3);

			mrc_alt_write_mask(DDRPHY,
				CMDDLLTXCTL + ch * DDRIOCCC_CH_OFFSET,
				temp, 0xffff);
			delay_n(3);
		}
	}

	/* Phase: clock alignment and initial RCOMP calibration */
	mrc_post_code(0x03, 0x14);

	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* Per-group clock-align select (3 or 1) */
			for (bl_grp = 0;
			     bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2;
			     bl_grp++) {
				mrc_alt_write_mask(DDRPHY,
					DQCLKALIGNREG2 +
					bl_grp * DDRIODQ_BL_OFFSET +
					ch * DDRIODQ_CH_OFFSET,
					bl_grp ? 3 : 1,
					0xf);
			}

			mrc_alt_write_mask(DDRPHY,
				ECCCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET,
				0x2, 0xf);
			mrc_alt_write_mask(DDRPHY,
				CMDCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET,
				0x0, 0xf);
			mrc_alt_write_mask(DDRPHY,
				CCCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET,
				0x2, 0xf);
			mrc_alt_write_mask(DDRPHY,
				CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET,
				0x20, 0x30);

			/*
			 * Set the clock-alignment FSM parameters
			 * (start point, step sizes, enable bit 0).
			 */
			mrc_alt_write_mask(DDRPHY,
				CMDCLKALIGNREG1 + ch * DDRIOCCC_CH_OFFSET,
				(0x18 << 16) | (0x10 << 8) |
				(0x8 << 2) | (0x1 << 0),
				0x007f7fff);

			mrc_alt_write_mask(DDRPHY,
				CMDCLKALIGNREG2 + ch * DDRIOCCC_CH_OFFSET,
				(0x10 << 16) | (0x4 << 8) | (0x2 << 4),
				0x001f0ff0);
#ifdef HMC_TEST
			/* Start hardware clock alignment and poll done
			 * (bit 24 self-clears on completion) */
			mrc_alt_write_mask(DDRPHY,
				CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET,
				1 << 24, 1 << 24);
			while (msg_port_alt_read(DDRPHY,
				CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET) &
				(1 << 24))
				;
#endif

			/* Set CMD pointer enable */
			mrc_alt_write_mask(DDRPHY,
				CMDPTRREG + ch * DDRIOCCC_CH_OFFSET,
				1, 1);

			/*
			 * Kick off one RCOMP calibration: request (bit 5),
			 * trigger CMPCTRL bit 0, busy-wait until hardware
			 * clears it, then drop the request bit.
			 */
			mrc_alt_write_mask(DDRPHY,
				COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET,
				1 << 5, 1 << 5);

			mrc_alt_write_mask(DDRPHY, CMPCTRL, 1, 1);

			while (msg_port_alt_read(DDRPHY, CMPCTRL) & 1)
				;

			mrc_alt_write_mask(DDRPHY,
				COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET,
				~(1 << 5), 1 << 5);

			/* Re-enable the command config bit cleared at the
			 * start of this function */
			mrc_alt_write_mask(DDRPHY,
				CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET,
				1 << 2, 1 << 2);

			/* Re-enable initialization-complete/clock-gate bit */
			mrc_alt_write_mask(DDRPHY,
				CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET,
				1 << 20, 1 << 20);
		}
	}

	LEAVEFN();
}
1146
1147
/*
 * Run the DDR3 JEDEC initialization sequence: release DRAM reset,
 * raise CKE via DRMC, issue NOPs, then program the four mode registers
 * (MR2, MR3, MR1, MR0) for each enabled rank and finish with ZQCL.
 *
 * Mode register contents are built as D-unit command words; bits [4:3]
 * select the target mode register, bits [22+] select the rank.  The
 * DDR3 field encodings (CWL, write recovery, Rtt_nom, etc.) follow
 * JESD79-3 — confirm against the spec before changing any shift.
 */
void perform_jedec_init(struct mrc_params *mrc_params)
{
	uint8_t twr, wl, rank;
	uint32_t tck;
	u32 dtr0;
	u32 drp;
	u32 drmc;
	u32 mrs0_cmd = 0;
	u32 emrs1_cmd = 0;
	u32 emrs2_cmd = 0;
	u32 emrs3_cmd = 0;

	ENTERFN();

	/* jedec_init starts */
	mrc_post_code(0x04, 0x00);

	/* Assert RESET# for 200us */
	mrc_alt_write_mask(DDRPHY, CCDDR3RESETCTL, 2, 0x102);

	delay_u(200);

	/* De-assert RESET#, select normal drive */
	mrc_alt_write_mask(DDRPHY, CCDDR3RESETCTL, 0x100, 0x102);

	dtr0 = msg_port_read(MEM_CTLR, DTR0);

	/*
	 * Propagate the enabled-rank bits (DRP[1:0]) into DRMC together
	 * with CKE mode so CKE can be raised toward the DIMM.
	 */
	drp = msg_port_read(MEM_CTLR, DRP);
	drp &= 0x3;

	drmc = msg_port_read(MEM_CTLR, DRMC);
	drmc &= 0xfffffffc;
	drmc |= (DRMC_CKEMODE | drp);

	msg_port_write(MEM_CTLR, DRMC, drmc);

	for (rank = 0; rank < NUM_RANKS; rank++) {
		/* Skip to next populated rank */
		if ((mrc_params->rank_enables & (1 << rank)) == 0)
			continue;

		dram_init_command(DCMD_NOP(rank));
	}

	/* Restore DRMC default (ODT mode only when read ODT disabled) */
	msg_port_write(MEM_CTLR, DRMC,
		       (mrc_params->rd_odt_value == 0 ? DRMC_ODTMODE : 0));

	/*
	 * Build EMRS2 (MR2): register select 2 in bits [4:3], CWL code
	 * (wl - 5) in bits [11:9], self-refresh temperature range bit.
	 */
	emrs2_cmd |= (2 << 3);
	wl = 5 + mrc_params->ddr_speed;
	emrs2_cmd |= ((wl - 5) << 9);
	emrs2_cmd |= (mrc_params->sr_temp_range << 13);

	/* Build EMRS3 (MR3): register select only, all fields default */
	emrs3_cmd |= (3 << 3);

	/*
	 * Build EMRS1 (MR1): register select 1, DLL enable (bit cleared),
	 * output drive strength from ron_value, Rtt_nom from
	 * rtt_nom_value via the DDR3_EMRS1_RTTNOM_* encodings.
	 */
	emrs1_cmd |= (1 << 3);
	emrs1_cmd &= ~(1 << 6);

	if (mrc_params->ron_value == 0)
		emrs1_cmd |= (1 << 7);		/* RZQ/7 (34 ohm) */
	else
		emrs1_cmd &= ~(1 << 7);		/* RZQ/6 (40 ohm) */

	if (mrc_params->rtt_nom_value == 0)
		emrs1_cmd |= (DDR3_EMRS1_RTTNOM_40 << 6);
	else if (mrc_params->rtt_nom_value == 1)
		emrs1_cmd |= (DDR3_EMRS1_RTTNOM_60 << 6);
	else if (mrc_params->rtt_nom_value == 2)
		emrs1_cmd |= (DDR3_EMRS1_RTTNOM_120 << 6);

	/* Save MR1 payload for later use (e.g. write leveling) */
	mrc_params->mrs1 = emrs1_cmd >> 6;

	/*
	 * Build MRS0 (MR0): burst length/type bits, DLL reset (bit 18),
	 * CAS latency taken back out of the DTR0 TCL field (+1 bias),
	 * write recovery code (twr - 4) in bits [17:15].
	 */
	mrs0_cmd |= (1 << 14);
	mrs0_cmd |= (1 << 18);
	mrs0_cmd |= ((((dtr0 >> 12) & 7) + 1) << 10);

	tck = t_ck[mrc_params->ddr_speed];
	/* Per JEDEC: tWR is 15 ns for all speed bins */
	twr = MCEIL(15000, tck);
	mrs0_cmd |= ((twr - 4) << 15);

	for (rank = 0; rank < NUM_RANKS; rank++) {
		/* Skip to next populated rank */
		if ((mrc_params->rank_enables & (1 << rank)) == 0)
			continue;

		/*
		 * Rank select bits are OR-ed cumulatively; this is fine
		 * because rank 0 contributes no bits and ranks are
		 * processed in ascending order.
		 */
		emrs2_cmd |= (rank << 22);
		dram_init_command(emrs2_cmd);

		emrs3_cmd |= (rank << 22);
		dram_init_command(emrs3_cmd);

		emrs1_cmd |= (rank << 22);
		dram_init_command(emrs1_cmd);

		mrs0_cmd |= (rank << 22);
		dram_init_command(mrs0_cmd);

		/* Long ZQ calibration to finish rank init */
		dram_init_command(DCMD_ZQCL(rank));
	}

	LEAVEFN();
}
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
/*
 * Tell the memory controller that DRAM initialization is complete:
 * clear PMI control (DCO_PMICTL) and set the init-complete bit (DCO_IC)
 * in the DCO register, after which normal operation may begin.
 */
void set_ddr_init_complete(struct mrc_params *mrc_params)
{
	u32 dco;

	ENTERFN();

	dco = msg_port_read(MEM_CTLR, DCO);
	dco &= ~DCO_PMICTL;
	dco |= DCO_IC;
	msg_port_write(MEM_CTLR, DCO, dco);

	LEAVEFN();
}
1344
1345
1346
1347
1348
1349
1350
1351void restore_timings(struct mrc_params *mrc_params)
1352{
1353 uint8_t ch, rk, bl;
1354 const struct mrc_timings *mt = &mrc_params->timings;
1355
1356 for (ch = 0; ch < NUM_CHANNELS; ch++) {
1357 for (rk = 0; rk < NUM_RANKS; rk++) {
1358 for (bl = 0; bl < NUM_BYTE_LANES; bl++) {
1359 set_rcvn(ch, rk, bl, mt->rcvn[ch][rk][bl]);
1360 set_rdqs(ch, rk, bl, mt->rdqs[ch][rk][bl]);
1361 set_wdqs(ch, rk, bl, mt->wdqs[ch][rk][bl]);
1362 set_wdq(ch, rk, bl, mt->wdq[ch][rk][bl]);
1363 if (rk == 0) {
1364
1365 set_vref(ch, bl, mt->vref[ch][bl]);
1366 }
1367 }
1368 set_wctl(ch, rk, mt->wctl[ch][rk]);
1369 }
1370 set_wcmd(ch, mt->wcmd[ch]);
1371 }
1372}
1373
1374
1375
1376
1377
1378
1379
1380void default_timings(struct mrc_params *mrc_params)
1381{
1382 uint8_t ch, rk, bl;
1383
1384 for (ch = 0; ch < NUM_CHANNELS; ch++) {
1385 for (rk = 0; rk < NUM_RANKS; rk++) {
1386 for (bl = 0; bl < NUM_BYTE_LANES; bl++) {
1387 set_rdqs(ch, rk, bl, 24);
1388 if (rk == 0) {
1389
1390 set_vref(ch, bl, 32);
1391 }
1392 }
1393 }
1394 }
1395}
1396
1397
1398
1399
1400
1401
/*
 * RCVN (receive enable) calibration: for each enabled channel/rank,
 * find the rising edge of the read DQS preamble and center the receive
 * enable delay on it.  With R2R_SHARING enabled the per-rank results
 * are averaged so all ranks share one delay per byte lane; with
 * BACKUP_RCVN the training is skipped and fixed platform values used.
 */
void rcvn_cal(struct mrc_params *mrc_params)
{
	uint8_t ch;	/* channel counter */
	uint8_t rk;	/* rank counter */
	uint8_t bl;	/* byte lane counter */
	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;

#ifdef R2R_SHARING
	/* Accumulated delay per channel/lane for rank-to-rank averaging */
	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
#ifndef BACKUP_RCVN
	/* Number of ranks folded into the average so far */
	uint32_t num_ranks_enabled = 0;
#endif
#endif

#ifdef BACKUP_RCVN
#else
	uint32_t temp;	/* sample_dqs() result bitmap, one bit per lane */
	/* Current delay under test, per byte lane */
	uint32_t delay[NUM_BYTE_LANES];
	u32 dtr1, dtr1_save;
#endif

	ENTERFN();

	/* rcvn_cal starts */
	mrc_post_code(0x05, 0x00);

#ifndef BACKUP_RCVN
	/* Stretch tCCD to 12 clocks so back-to-back reads don't overlap */
	dtr1 = msg_port_read(MEM_CTLR, DTR1);
	dtr1_save = dtr1;
	dtr1 |= DTR1_TCCD_12CLK;
	msg_port_write(MEM_CTLR, DTR1, dtr1);
#endif

#ifdef R2R_SHARING
	/* Need to set "final_delay[][]" array to 0 */
	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
#endif

	/* Loop through each enabled channel */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* Perform RCVN calibration on a per-rank basis */
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					/*
					 * POST code to indicate the current
					 * channel and rank being calibrated
					 */
					mrc_post_code(0x05, 0x10 + ((ch << 4) | rk));

#ifdef BACKUP_RCVN
					/* Fixed platform-characterized value */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++)
						set_rcvn(ch, rk, bl, ddr_rcvn[PLATFORM_ID]);
#else
					/* Un-gate the pointer logic so edge
					 * search can move the delay freely */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl += 2) {
						mrc_alt_write_mask(DDRPHY,
							B01PTRCTL1 +
							(bl >> 1) * DDRIODQ_BL_OFFSET +
							ch * DDRIODQ_CH_OFFSET,
							0, 1 << 8);
					}

					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						/* Start at 5 full clocks */
						delay[bl] = (4 + 1) * FULL_CLK;
						/* Program starting delay */
						set_rcvn(ch, rk, bl, delay[bl]);
					}

					/* Find the rising edge of DQS */
					find_rising_edge(mrc_params, delay, ch, rk, true);

					/* Advance by a quarter clock past
					 * the detected edge */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						delay[bl] += QRTR_CLK;
						set_rcvn(ch, rk, bl, delay[bl]);
					}

					/*
					 * Back up by whole clocks while DQS
					 * still samples high, to land in the
					 * preamble; a lane that cannot back
					 * up any further is a training
					 * failure (POST 0xee/0x50).
					 */
					do {
						temp = sample_dqs(mrc_params, ch, rk, true);
						for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
							if (temp & (1 << bl)) {
								if (delay[bl] >= FULL_CLK) {
									delay[bl] -= FULL_CLK;
									set_rcvn(ch, rk, bl, delay[bl]);
								} else {
									/* Error */
									training_message(ch, rk, bl);
									mrc_post_code(0xee, 0x50);
								}
							}
						}
					} while (temp & 0xff);

#ifdef R2R_SHARING
					/* Increment "num_ranks_enabled" */
					num_ranks_enabled++;

					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						/* Final quarter-clock margin */
						delay[bl] += QRTR_CLK;
						/* Add to the running total */
						final_delay[ch][bl] += delay[bl];
						/* Set delay to the average
						 * across trained ranks */
						set_rcvn(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);
					}
#else
					/* Final quarter-clock margin */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						delay[bl] += QRTR_CLK;
						set_rcvn(ch, rk, bl, delay[bl]);
					}
#endif

					/* Re-gate the pointer logic */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl += 2) {
						mrc_alt_write_mask(DDRPHY,
							B01PTRCTL1 +
							(bl >> 1) * DDRIODQ_BL_OFFSET +
							ch * DDRIODQ_CH_OFFSET,
							1 << 8, 1 << 8);
					}
#endif
				}
			}
		}
	}

#ifndef BACKUP_RCVN
	/* Restore original tCCD setting */
	msg_port_write(MEM_CTLR, DTR1, dtr1_save);
#endif

	LEAVEFN();
}
1542
1543
1544
1545
1546
1547
1548
/*
 * Perform the Write Levelling algorithm (align WCLK and WDQS).
 *
 * Acts on each enabled rank in each enabled channel separately: puts
 * the DRAM into write-levelling mode, starts the WDQS search at the
 * rank's WCLK value, finds the rising edge, then runs a coarse
 * read/write check and pulls any failing byte lane back one full
 * clock until all lanes pass.
 */
void wr_level(struct mrc_params *mrc_params)
{
	uint8_t ch;	/* channel counter */
	uint8_t rk;	/* rank counter */
	uint8_t bl;	/* byte lane counter */
	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;

#ifdef R2R_SHARING
	/* used to find placement for rank2rank sharing configs */
	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
#ifndef BACKUP_WDQS
	/* number of ranks folded into the rolling average so far */
	uint32_t num_ranks_enabled = 0;
#endif
#endif

#ifdef BACKUP_WDQS
#else
	/* stop condition for the coarse write-level loop below */
	bool all_edges_found;
	/* absolute delay value per byte lane */
	uint32_t delay[NUM_BYTE_LANES];
	uint32_t address;	/* target address for check_rw_coarse() */
	u32 dtr4, dtr4_save;
#endif

	ENTERFN();

	/* wr_level starts */
	mrc_post_code(0x06, 0x00);

#ifdef R2R_SHARING
	/* need to set final_delay[][] elements to 0 */
	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
#endif

	/* loop through each enabled channel */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* perform WRITE LEVELING algorithm on a per-rank basis */
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					/*
					 * POST_CODE here indicates the current
					 * rank and channel being calibrated
					 */
					mrc_post_code(0x06, 0x10 + ((ch << 4) | rk));

#ifdef BACKUP_WDQS
					/* restore WDQS (and tracked WDQ) from the backup table */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						set_wdqs(ch, rk, bl, ddr_wdqs[PLATFORM_ID]);
						set_wdq(ch, rk, bl, ddr_wdqs[PLATFORM_ID] - QRTR_CLK);
					}
#else
					/*
					 * perform a single PRECHARGE_ALL command to
					 * make the DRAM state machine go to IDLE state
					 */
					dram_init_command(DCMD_PREA(rk));

					/*
					 * enable write-levelling mode
					 * (MRS1 with value 0x82 — presumably the
					 * write-levelling enable bit; see JESD79-3)
					 */
					dram_init_command(DCMD_MRS1(rk, 0x82));

					/*
					 * disable ODT in the MCU while levelling
					 * (DTR4 restored below)
					 */
					dtr4 = msg_port_read(MEM_CTLR, DTR4);
					dtr4_save = dtr4;
					dtr4 |= DTR4_ODTDIS;
					msg_port_write(MEM_CTLR, DTR4, dtr4);

					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) {
						/*
						 * put the byte-lane pair DQ control into
						 * the write-levelling configuration
						 * (0x10000154 vs normal 0x00000154 below)
						 */
						mrc_alt_write_mask(DDRPHY,
							DQCTL + DDRIODQ_BL_OFFSET * bl + DDRIODQ_CH_OFFSET * ch,
							0x10000154,
							0x100003fc);
					}

					/* write-levelling mode enabled in the IO (set bit 16) */
					mrc_alt_write_mask(DDRPHY,
						CCDDR3RESETCTL + DDRIOCCC_CH_OFFSET * ch,
						1 << 16, 1 << 16);

					/* initialise the starting delay to WCLK */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						delay[bl] = get_wclk(ch, rk);

						set_wdqs(ch, rk, bl, delay[bl]);
					}

					/* now find the rising edge */
					find_rising_edge(mrc_params, delay, ch, rk, false);

					/* disable write-levelling mode in the IO (clear bit 16) */
					mrc_alt_write_mask(DDRPHY,
						CCDDR3RESETCTL + DDRIOCCC_CH_OFFSET * ch,
						0, 1 << 16);

					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) {
						/* restore the normal DQ control configuration */
						mrc_alt_write_mask(DDRPHY,
							DQCTL + DDRIODQ_BL_OFFSET * bl + DDRIODQ_CH_OFFSET * ch,
							0x00000154,
							0x100003fc);
					}

					/* restore original DTR4 (re-enable ODT) */
					msg_port_write(MEM_CTLR, DTR4, dtr4_save);

					/*
					 * restore the original MRS1 value
					 * (write-levelling mode disable)
					 */
					dram_init_command(DCMD_MRS1(rk, mrc_params->mrs1));

					/*
					 * perform a single PRECHARGE_ALL command to
					 * make the DRAM state machine go to IDLE state
					 */
					dram_init_command(DCMD_PREA(rk));

					mrc_post_code(0x06, 0x30 + ((ch << 4) | rk));

					/*
					 * COARSE WRITE LEVEL:
					 * check that we're on the correct clock edge
					 */

					/* hte reconfiguration request */
					mrc_params->hte_setup = 1;

					/* start the coarse check one full clock out */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						delay[bl] = get_wdqs(ch, rk, bl) + FULL_CLK;
						set_wdqs(ch, rk, bl, delay[bl]);

						/*
						 * program WDQ timings based on WDQS
						 * (WDQ = WDQS - QRTR_CLK)
						 */
						set_wdq(ch, rk, bl, (delay[bl] - QRTR_CLK));
					}

					/* get an address in the targeted channel/rank */
					address = get_addr(ch, rk);
					do {
						uint32_t coarse_result = 0x00;
						uint32_t coarse_result_mask = byte_lane_mask(mrc_params);

						/* assume pass */
						all_edges_found = true;

						/* hte reconfiguration request */
						mrc_params->hte_setup = 1;
						coarse_result = check_rw_coarse(mrc_params, address);

						/*
						 * check for failures and margin the
						 * failing byte lane back one full clock;
						 * post-code 0xee,0x50 in the sibling
						 * rcvn path flags running out of range
						 */
						for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
							if (coarse_result & (coarse_result_mask << bl)) {
								all_edges_found = false;
								delay[bl] -= FULL_CLK;
								set_wdqs(ch, rk, bl, delay[bl]);

								/* keep WDQ tracking WDQS */
								set_wdq(ch, rk, bl, delay[bl] - QRTR_CLK);
							}
						}
					} while (!all_edges_found);

#ifdef R2R_SHARING
					/* increment num_ranks_enabled */
					num_ranks_enabled++;

					/*
					 * accumulate delay[] into final_delay[][]
					 * and program the rolling average across ranks
					 */
					for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
						final_delay[ch][bl] += delay[bl];
						set_wdqs(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);

						/* keep WDQ tracking WDQS */
						set_wdq(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled - QRTR_CLK);
					}
#endif
#endif
				}
			}
		}
	}

	LEAVEFN();
}
1751
1752void prog_page_ctrl(struct mrc_params *mrc_params)
1753{
1754 u32 dpmc0;
1755
1756 ENTERFN();
1757
1758 dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
1759 dpmc0 &= ~DPMC0_PCLSTO_MASK;
1760 dpmc0 |= (4 << 16);
1761 dpmc0 |= DPMC0_PREAPWDEN;
1762 msg_port_write(MEM_CTLR, DPMC0, dpmc0);
1763}
1764
1765
1766
1767
1768
1769
1770
1771
1772
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
/*
 * Perform the Read Training algorithm on all enabled channels/ranks.
 *
 * The algorithm searches for the largest read eye in two dimensions:
 * RDQS delay (X) and VREF (Y).  For each corner (Left/Right x
 * Bottom/Top) it moves the failing edge inward until the HTE check
 * passes; if the RDQS eye collapses below MIN_RDQS_EYE, VREF is
 * stepped instead and the RDQS search restarts from its limit.  The
 * final RDQS/VREF settings are the averages of the discovered edges.
 */
void rd_train(struct mrc_params *mrc_params)
{
	uint8_t ch;	/* channel counter */
	uint8_t rk;	/* rank counter */
	uint8_t bl;	/* byte lane counter */
	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
#ifdef BACKUP_RDQS
#else
	uint8_t side_x;	/* tracks LEFT/RIGHT approach vectors */
	uint8_t side_y;	/* tracks BOTTOM/TOP approach vectors */
	/* X coordinate data (passing RDQS values) for the approach vectors */
	uint8_t x_coordinate[2][2][NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
	/* Y coordinate data (passing VREF values) for the approach vectors */
	uint8_t y_coordinate[2][2][NUM_CHANNELS][NUM_BYTE_LANES];
	/* centred X (RDQS) setting per channel/rank/lane */
	uint8_t x_center[NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
	/* centred Y (VREF) setting per channel/lane */
	uint8_t y_center[NUM_CHANNELS][NUM_BYTE_LANES];
	uint32_t address;	/* target address for check_bls_ex() */
	uint32_t result;	/* result of check_bls_ex() */
	uint32_t bl_mask;	/* byte lane mask for result checking */
#ifdef R2R_SHARING
	/* used to find placement for rank2rank sharing configs */
	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
	/* number of ranks folded into the rolling average so far */
	uint32_t num_ranks_enabled = 0;
#endif
#endif

	/* rd_train starts */
	mrc_post_code(0x07, 0x00);

	ENTERFN();

#ifdef BACKUP_RDQS
	/* restore RDQS from the backup table */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					for (bl = 0;
					     bl < NUM_BYTE_LANES / bl_divisor;
					     bl++) {
						set_rdqs(ch, rk, bl, ddr_rdqs[PLATFORM_ID]);
					}
				}
			}
		}
	}
#else
	/* initialise x/y coordinates to the full search window */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					for (bl = 0;
					     bl < NUM_BYTE_LANES / bl_divisor;
					     bl++) {
						/* RDQS (x) starts at the limits */
						x_coordinate[L][B][ch][rk][bl] = RDQS_MIN;
						x_coordinate[R][B][ch][rk][bl] = RDQS_MAX;
						x_coordinate[L][T][ch][rk][bl] = RDQS_MIN;
						x_coordinate[R][T][ch][rk][bl] = RDQS_MAX;
						/* VREF (y) starts at the limits */
						y_coordinate[L][B][ch][bl] = VREF_MIN;
						y_coordinate[R][B][ch][bl] = VREF_MIN;
						y_coordinate[L][T][ch][bl] = VREF_MAX;
						y_coordinate[R][T][ch][bl] = VREF_MAX;
					}
				}
			}
		}
	}

	/* initialise other variables */
	bl_mask = byte_lane_mask(mrc_params);
	address = get_addr(0, 0);

#ifdef R2R_SHARING
	/* need to set final_delay[][] elements to 0 */
	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
#endif

	/* look for passing coordinates, one approach vector at a time */
	for (side_y = B; side_y <= T; side_y++) {
		for (side_x = L; side_x <= R; side_x++) {
			mrc_post_code(0x07, 0x10 + side_y * 2 + side_x);

			/* find passing values */
			for (ch = 0; ch < NUM_CHANNELS; ch++) {
				if (mrc_params->channel_enables & (0x1 << ch)) {
					for (rk = 0; rk < NUM_RANKS; rk++) {
						if (mrc_params->rank_enables &
							(0x1 << rk)) {
							/* program the starting settings for this vector */
							for (bl = 0;
							     bl < NUM_BYTE_LANES / bl_divisor;
							     bl++) {
								set_rdqs(ch, rk, bl,
									x_coordinate[side_x][side_y][ch][rk][bl]);
								set_vref(ch, bl,
									y_coordinate[side_x][side_y][ch][bl]);
							}

							/* get an address in the target channel/rank */
							address = get_addr(ch, rk);

							/* request HTE reconfiguration */
							mrc_params->hte_setup = 1;

							/* test the settings and adjust failing lanes */
							do {
								/* result bits [07:00] = failing byte lanes */
								result = check_bls_ex(mrc_params, address);

								/* check for failures */
								if (result & 0xff) {
									/* at least one byte lane failed */
									for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
										if (result &
											(bl_mask << bl)) {
											/* step the failing RDQS edge inward */
											if (side_x == L)
												x_coordinate[L][side_y][ch][rk][bl] += RDQS_STEP;
											else
												x_coordinate[R][side_y][ch][rk][bl] -= RDQS_STEP;

											/* check that the RDQS eye hasn't closed too much */
											if ((x_coordinate[L][side_y][ch][rk][bl] > (RDQS_MAX - MIN_RDQS_EYE)) ||
												(x_coordinate[R][side_y][ch][rk][bl] < (RDQS_MIN + MIN_RDQS_EYE)) ||
												(x_coordinate[L][side_y][ch][rk][bl] ==
												x_coordinate[R][side_y][ch][rk][bl])) {
												/*
												 * not enough RDQS margin at this
												 * VREF: step VREF inward instead
												 */
												if (side_y == B)
													y_coordinate[side_x][B][ch][bl] += VREF_STEP;
												else
													y_coordinate[side_x][T][ch][bl] -= VREF_STEP;

												/* check that the VREF eye hasn't closed too much */
												if ((y_coordinate[side_x][B][ch][bl] > (VREF_MAX - MIN_VREF_EYE)) ||
													(y_coordinate[side_x][T][ch][bl] < (VREF_MIN + MIN_VREF_EYE)) ||
													(y_coordinate[side_x][B][ch][bl] == y_coordinate[side_x][T][ch][bl])) {
													/* eye collapsed — fatal training error */
													training_message(ch, rk, bl);
													mrc_post_code(0xEE, 0x70 + side_y * 2 + side_x);
												} else {
													/* apply the new VREF */
													set_vref(ch, bl, y_coordinate[side_x][side_y][ch][bl]);
													/* restart the RDQS search from its limit */
													x_coordinate[side_x][side_y][ch][rk][bl] =
														(side_x == L) ? RDQS_MIN : RDQS_MAX;
												}
											}

											/* apply the adjusted RDQS */
											set_rdqs(ch, rk, bl, x_coordinate[side_x][side_y][ch][rk][bl]);
										}
									}
								}
							} while (result & 0xff);
						}
					}
				}
			}
		}
	}

	mrc_post_code(0x07, 0x20);

	/* find final RDQS (X coordinate) and final VREF (Y coordinate) */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						uint32_t temp1;
						uint32_t temp2;

						/* x coordinate (RDQS) */
						DPF(D_INFO,
						"RDQS T/B eye rank%d lane%d : %d-%d %d-%d\n",
						rk, bl,
						x_coordinate[L][T][ch][rk][bl],
						x_coordinate[R][T][ch][rk][bl],
						x_coordinate[L][B][ch][rk][bl],
						x_coordinate[R][B][ch][rk][bl]);

						/* centre of the top eye */
						temp1 = (x_coordinate[R][T][ch][rk][bl] + x_coordinate[L][T][ch][rk][bl]) / 2;
						/* centre of the bottom eye */
						temp2 = (x_coordinate[R][B][ch][rk][bl] + x_coordinate[L][B][ch][rk][bl]) / 2;
						/* average of the two centres */
						x_center[ch][rk][bl] = (uint8_t) ((temp1 + temp2) / 2);

						/* y coordinate (VREF) */
						DPF(D_INFO,
						"VREF R/L eye lane%d : %d-%d %d-%d\n",
						bl,
						y_coordinate[R][B][ch][bl],
						y_coordinate[R][T][ch][bl],
						y_coordinate[L][B][ch][bl],
						y_coordinate[L][T][ch][bl]);

						/* centre of the right eye */
						temp1 = (y_coordinate[R][T][ch][bl] + y_coordinate[R][B][ch][bl]) / 2;
						/* centre of the left eye */
						temp2 = (y_coordinate[L][T][ch][bl] + y_coordinate[L][B][ch][bl]) / 2;
						/* average of the two centres */
						y_center[ch][bl] = (uint8_t) ((temp1 + temp2) / 2);
					}
				}
			}
		}
	}

#ifdef RX_EYE_CHECK
	/* perform a check at the four corners of the minimum required eye */
	for (side_y = B; side_y <= T; side_y++) {
		for (side_x = L; side_x <= R; side_x++) {
			mrc_post_code(0x07, 0x30 + side_y * 2 + side_x);

			/* program the corner settings */
			for (ch = 0; ch < NUM_CHANNELS; ch++) {
				if (mrc_params->channel_enables & (1 << ch)) {
					for (rk = 0; rk < NUM_RANKS; rk++) {
						if (mrc_params->rank_enables & (1 << rk)) {
							for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
								if (side_x == L)
									set_rdqs(ch, rk, bl, x_center[ch][rk][bl] - (MIN_RDQS_EYE / 2));
								else
									set_rdqs(ch, rk, bl, x_center[ch][rk][bl] + (MIN_RDQS_EYE / 2));

								if (side_y == B)
									set_vref(ch, bl, y_center[ch][bl] - (MIN_VREF_EYE / 2));
								else
									set_vref(ch, bl, y_center[ch][bl] + (MIN_VREF_EYE / 2));
							}
						}
					}
				}
			}

			/* request HTE reconfiguration */
			mrc_params->hte_setup = 1;

			/* check the corner */
			if (check_bls_ex(mrc_params, address) & 0xff) {
				/* one or more byte lanes failed at this corner */
				mrc_post_code(0xee, 0x74 + side_x * 2 + side_y);
			}
		}
	}
#endif

	mrc_post_code(0x07, 0x40);

	/* set final centre-point values */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
#ifdef R2R_SHARING
					/* increment num_ranks_enabled */
					num_ranks_enabled++;
#endif
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						/* x coordinate (RDQS) */
#ifdef R2R_SHARING
						final_delay[ch][bl] += x_center[ch][rk][bl];
						set_rdqs(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);
#else
						set_rdqs(ch, rk, bl, x_center[ch][rk][bl]);
#endif
						/* y coordinate (VREF) */
						set_vref(ch, bl, y_center[ch][bl]);
					}
				}
			}
		}
	}
#endif

	LEAVEFN();
}
2070
2071
2072
2073
2074
2075
2076
2077
2078
2079
2080
2081
/*
 * Perform the Write Training algorithm on all enabled channels/ranks.
 *
 * Trains WDQ on every byte lane: opens a window of +/- QRTR_CLK
 * around the WDQS-tracked value, then collapses each failing edge
 * inward (Left edge up, Right edge down) until the HTE check passes,
 * reporting a training error if the eye closes completely.  The
 * final WDQ is the centre of the passing window.
 */
void wr_train(struct mrc_params *mrc_params)
{
	uint8_t ch;	/* channel counter */
	uint8_t rk;	/* rank counter */
	uint8_t bl;	/* byte lane counter */
	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
#ifdef BACKUP_WDQ
#else
	uint8_t side;	/* tracks LEFT/RIGHT approach vectors */
	uint32_t temp;	/* scratch for eye-window arithmetic */
	/* edge delay value per approach vector/channel/rank/lane */
	uint32_t delay[2][NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
	uint32_t address;	/* target address for check_bls_ex() */
	uint32_t result;	/* result of check_bls_ex() */
	uint32_t bl_mask;	/* byte lane mask for result checking */
#ifdef R2R_SHARING
	/* used to find placement for rank2rank sharing configs */
	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
	/* number of ranks folded into the rolling average so far */
	uint32_t num_ranks_enabled = 0;
#endif
#endif

	/* wr_train starts */
	mrc_post_code(0x08, 0x00);

	ENTERFN();

#ifdef BACKUP_WDQ
	/* restore WDQ from the backup table */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					for (bl = 0;
					     bl < NUM_BYTE_LANES / bl_divisor;
					     bl++) {
						set_wdq(ch, rk, bl, ddr_wdq[PLATFORM_ID]);
					}
				}
			}
		}
	}
#else
	/* initialise the starting window around (WDQS - QRTR_CLK) */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					for (bl = 0;
					     bl < NUM_BYTE_LANES / bl_divisor;
					     bl++) {
						/*
						 * WDQ tracks (WDQS - QRTR_CLK); open the
						 * initial search window one QRTR_CLK on
						 * either side of the tracked value
						 */
						temp = get_wdqs(ch, rk, bl) - QRTR_CLK;
						delay[L][ch][rk][bl] = temp - QRTR_CLK;
						delay[R][ch][rk][bl] = temp + QRTR_CLK;
					}
				}
			}
		}
	}

	/* initialise other variables */
	bl_mask = byte_lane_mask(mrc_params);
	address = get_addr(0, 0);

#ifdef R2R_SHARING
	/* need to set final_delay[][] elements to 0 */
	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
#endif

	/*
	 * train the LEFT edge first, then the RIGHT edge, collapsing
	 * each inward until no failures are observed
	 */
	for (side = L; side <= R; side++) {
		mrc_post_code(0x08, 0x10 + side);

		/* program the starting values for this edge */
		for (ch = 0; ch < NUM_CHANNELS; ch++) {
			if (mrc_params->channel_enables & (1 << ch)) {
				for (rk = 0; rk < NUM_RANKS; rk++) {
					if (mrc_params->rank_enables &
						(1 << rk)) {
						for (bl = 0;
						     bl < NUM_BYTE_LANES / bl_divisor;
						     bl++) {
							set_wdq(ch, rk, bl, delay[side][ch][rk][bl]);
						}
					}
				}
			}
		}

		/* find passing values */
		for (ch = 0; ch < NUM_CHANNELS; ch++) {
			if (mrc_params->channel_enables & (1 << ch)) {
				for (rk = 0; rk < NUM_RANKS; rk++) {
					if (mrc_params->rank_enables &
						(1 << rk)) {
						/* get an address in the target channel/rank */
						address = get_addr(ch, rk);

						/* request HTE reconfiguration */
						mrc_params->hte_setup = 1;

						/* check the settings and adjust failing lanes */
						do {
							/* result bits [07:00] = failing byte lanes */
							result = check_bls_ex(mrc_params, address);

							/* check for failures */
							if (result & 0xff) {
								/* at least one byte lane failed */
								for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
									if (result &
										(bl_mask << bl)) {
										/* step the failing edge inward */
										if (side == L)
											delay[L][ch][rk][bl] += WDQ_STEP;
										else
											delay[R][ch][rk][bl] -= WDQ_STEP;

										/* check for algorithm failure (eye fully closed) */
										if (delay[L][ch][rk][bl] != delay[R][ch][rk][bl]) {
											/*
											 * margin still available:
											 * apply the adjusted delay
											 */
											set_wdq(ch, rk, bl,
												delay[side][ch][rk][bl]);
										} else {
											/*
											 * no margin left:
											 * report the failure
											 */
											training_message(ch, rk, bl);
											mrc_post_code(0xee, 0x80 + side);
										}
									}
								}
							}

						} while (result & 0xff);
					}
				}
			}
		}
	}

	/* program WDQ to the centre of the passing window */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
#ifdef R2R_SHARING
					/* increment num_ranks_enabled */
					num_ranks_enabled++;
#endif
					for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
						DPF(D_INFO,
						"WDQ eye rank%d lane%d : %d-%d\n",
						rk, bl,
						delay[L][ch][rk][bl],
						delay[R][ch][rk][bl]);

						/* midpoint of the passing window */
						temp = (delay[R][ch][rk][bl] + delay[L][ch][rk][bl]) / 2;

#ifdef R2R_SHARING
						/* fold into the rolling average across ranks */
						final_delay[ch][bl] += temp;
						set_wdq(ch, rk, bl,
							final_delay[ch][bl] / num_ranks_enabled);
#else
						set_wdq(ch, rk, bl, temp);
#endif
					}
				}
			}
		}
	}
#endif

	LEAVEFN();
}
2267
2268
2269
2270
2271
2272
2273
2274void store_timings(struct mrc_params *mrc_params)
2275{
2276 uint8_t ch, rk, bl;
2277 struct mrc_timings *mt = &mrc_params->timings;
2278
2279 for (ch = 0; ch < NUM_CHANNELS; ch++) {
2280 for (rk = 0; rk < NUM_RANKS; rk++) {
2281 for (bl = 0; bl < NUM_BYTE_LANES; bl++) {
2282 mt->rcvn[ch][rk][bl] = get_rcvn(ch, rk, bl);
2283 mt->rdqs[ch][rk][bl] = get_rdqs(ch, rk, bl);
2284 mt->wdqs[ch][rk][bl] = get_wdqs(ch, rk, bl);
2285 mt->wdq[ch][rk][bl] = get_wdq(ch, rk, bl);
2286
2287 if (rk == 0)
2288 mt->vref[ch][bl] = get_vref(ch, bl);
2289 }
2290
2291 mt->wctl[ch][rk] = get_wctl(ch, rk);
2292 }
2293
2294 mt->wcmd[ch] = get_wcmd(ch);
2295 }
2296
2297
2298 mt->ddr_speed = mrc_params->ddr_speed;
2299}
2300
2301
2302
2303
2304
2305void enable_scrambling(struct mrc_params *mrc_params)
2306{
2307 uint32_t lfsr = 0;
2308 uint8_t i;
2309
2310 if (mrc_params->scrambling_enables == 0)
2311 return;
2312
2313 ENTERFN();
2314
2315
2316 lfsr = mrc_params->timings.scrambler_seed;
2317
2318 if (mrc_params->boot_mode == BM_COLD) {
2319
2320
2321
2322
2323 if (lfsr == 0) {
2324
2325
2326
2327
2328 lfsr = rdtsc() & 0x0fffffff;
2329 } else {
2330
2331
2332
2333
2334
2335
2336 for (i = 0; i < 16; i++)
2337 lfsr32(&lfsr);
2338 }
2339
2340
2341 mrc_params->timings.scrambler_seed = lfsr;
2342 }
2343
2344
2345
2346
2347
2348 lfsr32(&lfsr);
2349 msg_port_write(MEM_CTLR, SCRMSEED, (lfsr & 0x0003ffff));
2350
2351 for (i = 0; i < 2; i++)
2352 msg_port_write(MEM_CTLR, SCRMLO + i, (lfsr & 0xaaaaaaaa));
2353
2354 LEAVEFN();
2355}
2356
2357
2358
2359
2360
2361void prog_ddr_control(struct mrc_params *mrc_params)
2362{
2363 u32 dsch;
2364 u32 dpmc0;
2365
2366 ENTERFN();
2367
2368 dsch = msg_port_read(MEM_CTLR, DSCH);
2369 dsch &= ~(DSCH_OOODIS | DSCH_OOOST3DIS | DSCH_NEWBYPDIS);
2370 msg_port_write(MEM_CTLR, DSCH, dsch);
2371
2372 dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
2373 dpmc0 &= ~DPMC0_DISPWRDN;
2374 dpmc0 |= (mrc_params->power_down_disable << 25);
2375 dpmc0 &= ~DPMC0_CLKGTDIS;
2376 dpmc0 &= ~DPMC0_PCLSTO_MASK;
2377 dpmc0 |= (4 << 16);
2378 dpmc0 |= DPMC0_PREAPWDEN;
2379 msg_port_write(MEM_CTLR, DPMC0, dpmc0);
2380
2381
2382 mrc_write_mask(MEM_CTLR, DPMC1, 0x20, 0x30);
2383
2384 LEAVEFN();
2385}
2386
2387
2388
2389
2390
2391void prog_dra_drb(struct mrc_params *mrc_params)
2392{
2393 u32 drp;
2394 u32 dco;
2395 u8 density = mrc_params->params.density;
2396
2397 ENTERFN();
2398
2399 dco = msg_port_read(MEM_CTLR, DCO);
2400 dco &= ~DCO_IC;
2401 msg_port_write(MEM_CTLR, DCO, dco);
2402
2403 drp = 0;
2404 if (mrc_params->rank_enables & 1)
2405 drp |= DRP_RKEN0;
2406 if (mrc_params->rank_enables & 2)
2407 drp |= DRP_RKEN1;
2408 if (mrc_params->dram_width == X16) {
2409 drp |= (1 << 4);
2410 drp |= (1 << 9);
2411 }
2412
2413
2414
2415
2416
2417 if (density == 0)
2418 density = 4;
2419
2420 drp |= ((density - 1) << 6);
2421 drp |= ((density - 1) << 11);
2422
2423
2424 drp |= (mrc_params->address_mode << 14);
2425
2426 msg_port_write(MEM_CTLR, DRP, drp);
2427
2428 dco &= ~DCO_PMICTL;
2429 dco |= DCO_IC;
2430 msg_port_write(MEM_CTLR, DCO, dco);
2431
2432 LEAVEFN();
2433}
2434
2435
/*
 * Issue the DRAM wake command.
 *
 * NOTE(review): mrc_params is unused here; presumably kept so the
 * function matches the common MRC task signature — confirm against
 * the task table that calls it.
 */
void perform_wake(struct mrc_params *mrc_params)
{
	ENTERFN();

	dram_wake_command();

	LEAVEFN();
}
2444
2445
2446
2447
2448
2449void change_refresh_period(struct mrc_params *mrc_params)
2450{
2451 u32 drfc;
2452 u32 dcal;
2453 u32 dpmc0;
2454
2455 ENTERFN();
2456
2457 drfc = msg_port_read(MEM_CTLR, DRFC);
2458 drfc &= ~DRFC_TREFI_MASK;
2459 drfc |= (mrc_params->refresh_rate << 12);
2460 drfc |= DRFC_REFDBTCLR;
2461 msg_port_write(MEM_CTLR, DRFC, drfc);
2462
2463 dcal = msg_port_read(MEM_CTLR, DCAL);
2464 dcal &= ~DCAL_ZQCINT_MASK;
2465 dcal |= (3 << 8);
2466 msg_port_write(MEM_CTLR, DCAL, dcal);
2467
2468 dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
2469 dpmc0 |= (DPMC0_DYNSREN | DPMC0_ENPHYCLKGATE);
2470 msg_port_write(MEM_CTLR, DPMC0, dpmc0);
2471
2472 LEAVEFN();
2473}
2474
2475
2476
2477
2478
2479void set_auto_refresh(struct mrc_params *mrc_params)
2480{
2481 uint32_t channel;
2482 uint32_t rank;
2483 uint32_t bl;
2484 uint32_t bl_divisor = 1;
2485 uint32_t temp;
2486
2487 ENTERFN();
2488
2489
2490
2491
2492
2493 for (channel = 0; channel < NUM_CHANNELS; channel++) {
2494 if (mrc_params->channel_enables & (1 << channel)) {
2495
2496 mrc_alt_write_mask(DDRPHY, CMPCTRL, 2, 2);
2497
2498
2499 switch (mrc_params->rd_odt_value) {
2500 case 0:
2501 temp = 0x3f;
2502 break;
2503 default:
2504 temp = 0x00;
2505 break;
2506 }
2507
2508 for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) {
2509
2510 mrc_alt_write_mask(DDRPHY,
2511 B0OVRCTL + bl * DDRIODQ_BL_OFFSET +
2512 channel * DDRIODQ_CH_OFFSET,
2513 temp << 10,
2514 0x003ffc00);
2515
2516
2517 mrc_alt_write_mask(DDRPHY,
2518 B1OVRCTL + bl * DDRIODQ_BL_OFFSET +
2519 channel * DDRIODQ_CH_OFFSET,
2520 temp << 10,
2521 0x003ffc00);
2522 }
2523
2524
2525 for (rank = 0; rank < NUM_RANKS; rank++) {
2526 if (mrc_params->rank_enables & (1 << rank))
2527 dram_init_command(DCMD_ZQCS(rank));
2528 }
2529 }
2530 }
2531
2532 clear_pointers();
2533
2534 LEAVEFN();
2535}
2536
2537
2538
2539
2540
2541
2542
2543void ecc_enable(struct mrc_params *mrc_params)
2544{
2545 u32 drp;
2546 u32 dsch;
2547 u32 ecc_ctrl;
2548
2549 if (mrc_params->ecc_enables == 0)
2550 return;
2551
2552 ENTERFN();
2553
2554
2555 drp = msg_port_read(MEM_CTLR, DRP);
2556 drp &= ~DRP_ADDRMAP_MASK;
2557 drp |= DRP_ADDRMAP_MAP1;
2558 drp |= DRP_PRI64BSPLITEN;
2559 msg_port_write(MEM_CTLR, DRP, drp);
2560
2561
2562 dsch = msg_port_read(MEM_CTLR, DSCH);
2563 dsch |= DSCH_NEWBYPDIS;
2564 msg_port_write(MEM_CTLR, DSCH, dsch);
2565
2566
2567 ecc_ctrl = (DECCCTRL_SBEEN | DECCCTRL_DBEEN | DECCCTRL_ENCBGEN);
2568 msg_port_write(MEM_CTLR, DECCCTRL, ecc_ctrl);
2569
2570
2571 mrc_params->mem_size -= mrc_params->mem_size / 8;
2572
2573
2574 if (mrc_params->boot_mode != BM_S3) {
2575 select_hte();
2576 hte_mem_init(mrc_params, MRC_MEM_INIT);
2577 select_mem_mgr();
2578 }
2579
2580 LEAVEFN();
2581}
2582
2583
2584
2585
2586
2587void memory_test(struct mrc_params *mrc_params)
2588{
2589 uint32_t result = 0;
2590
2591 ENTERFN();
2592
2593 select_hte();
2594 result = hte_mem_init(mrc_params, MRC_MEM_TEST);
2595 select_mem_mgr();
2596
2597 DPF(D_INFO, "Memory test result %x\n", result);
2598 mrc_params->status = ((result == 0) ? MRC_SUCCESS : MRC_E_MEMTEST);
2599 LEAVEFN();
2600}
2601
2602
2603void lock_registers(struct mrc_params *mrc_params)
2604{
2605 u32 dco;
2606
2607 ENTERFN();
2608
2609 dco = msg_port_read(MEM_CTLR, DCO);
2610 dco &= ~(DCO_PMICTL | DCO_PMIDIS);
2611 dco |= (DCO_DRPLOCK | DCO_CPGCLOCK);
2612 msg_port_write(MEM_CTLR, DCO, dco);
2613
2614 LEAVEFN();
2615}
2616