// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd.
 */
5#include <common.h>
6#include <clk.h>
7#include <debug_uart.h>
8#include <dm.h>
9#include <dt-structs.h>
10#include <ram.h>
11#include <regmap.h>
12#include <syscon.h>
13#include <asm/io.h>
14#include <asm/arch-rockchip/clock.h>
15#include <asm/arch-rockchip/cru_rk3328.h>
16#include <asm/arch-rockchip/grf_rk3328.h>
17#include <asm/arch-rockchip/sdram.h>
18#include <asm/arch-rockchip/sdram_rk3328.h>
19#include <asm/arch-rockchip/uart.h>
20
/*
 * Driver private data.
 *
 * The TPL-only members hold pointers to the register blocks needed to
 * bring the DDR controller/PHY up.  Later boot stages only need the GRF
 * (to read the size encoded in os_reg[2/3]) and the ram_info reported
 * through the RAM uclass.
 */
struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct ddr_pctl_regs *pctl;		/* DDR protocol controller */
	struct ddr_phy_regs *phy;		/* DDR PHY */
	struct clk ddr_clk;
	struct rk3328_cru *cru;			/* clock & reset unit */
	struct msch_regs *msch;			/* memory scheduler (NoC) */
	struct rk3328_ddr_grf_regs *ddr_grf;	/* DDR-specific GRF block */
#endif
	struct ram_info info;			/* base/size for the RAM uclass */
	struct rk3328_grf_regs *grf;		/* general register files */
};
33
34#ifdef CONFIG_TPL_BUILD
35
/*
 * Working copy of the channel config from sdram_params; its noc_timings
 * (bwratio) are patched after capacity detection before the second init.
 */
struct rk3328_sdram_channel sdram_ch;
37
/* Platform data: SDRAM parameters plus the regmap of register ranges. */
struct rockchip_dmc_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3328_dmc dtplat;	/* of-platdata DT payload */
#else
	struct rk3328_sdram_params sdram_params; /* "rockchip,sdram-params" */
#endif
	struct regmap *map;	/* ranges: phy, pctl, grf, cru, msch, ddr_grf */
};
46
47#if CONFIG_IS_ENABLED(OF_PLATDATA)
48static int conv_of_platdata(struct udevice *dev)
49{
50 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
51 struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
52 int ret;
53
54 ret = regmap_init_mem_platdata(dev, dtplat->reg,
55 ARRAY_SIZE(dtplat->reg) / 2,
56 &plat->map);
57 if (ret)
58 return ret;
59
60 return 0;
61}
62#endif
63
/*
 * Assert (1) or release (0) the DDR soft resets.
 *
 * softrst_con[5] carries the controller/PHY core and APB resets;
 * the controller async reset in softrst_con[9] follows ctl_srstn.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(ddrctrl_srstn_req(ctl_srstn) | ddrctrl_psrstn_req(ctl_psrstn) |
	       ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
	       &dram->cru->softrst_con[5]);
	writel(ddrctrl_asrstn_req(ctl_srstn), &dram->cru->softrst_con[9]);
}
73
74static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
75{
76 unsigned int refdiv, postdiv1, postdiv2, fbdiv;
77 int delay = 1000;
78 u32 mhz = hz / MHZ;
79
80 refdiv = 1;
81 if (mhz <= 300) {
82 postdiv1 = 4;
83 postdiv2 = 2;
84 } else if (mhz <= 400) {
85 postdiv1 = 6;
86 postdiv2 = 1;
87 } else if (mhz <= 600) {
88 postdiv1 = 4;
89 postdiv2 = 1;
90 } else if (mhz <= 800) {
91 postdiv1 = 3;
92 postdiv2 = 1;
93 } else if (mhz <= 1600) {
94 postdiv1 = 2;
95 postdiv2 = 1;
96 } else {
97 postdiv1 = 1;
98 postdiv2 = 1;
99 }
100 fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
101
102 writel(((0x1 << 4) << 16) | (0 << 4), &dram->cru->mode_con);
103 writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->dpll_con[0]);
104 writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
105 &dram->cru->dpll_con[1]);
106
107 while (delay > 0) {
108 udelay(1);
109 if (LOCK(readl(&dram->cru->dpll_con[1])))
110 break;
111 delay--;
112 }
113
114 writel(((0x1 << 4) << 16) | (1 << 4), &dram->cru->mode_con);
115}
116
/*
 * Set the DDR clock rate.
 *
 * Clears PHY reg 0xef bit 7 first — presumably selecting the DPLL as the
 * DDR clock source (TODO: confirm against the PHY register map) — then
 * programs the DPLL at 2x the configured ddr_freq.
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rk3328_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0xef), 1 << 7);

	/* DPLL runs at twice the DDR frequency value from the DT */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ * 2);
}
128
129
130
131
132
133
/*
 * Pick the ddrconf index matching the detected geometry.
 *
 * Packs rank/row/bank/width/die-width into a key and searches the
 * ddr4_cfg_2_rbc / ddr_cfg_2_rbc tables for a compatible entry:
 *  - DDR4 dual-rank with equal row counts: entries 17..20
 *  - DDR4 otherwise: entries 10..16
 *  - non-DDR4 with 4 banks (bk == 2): fixed entry 8
 *  - non-DDR4 dual-rank with equal rows: entries 5..7
 *  - non-DDR4 otherwise: entries 0..4
 * The "<=" comparisons accept table entries with at least as many
 * row/rank address bits as required.
 *
 * Return: ddrconf index 0..20, or (u32)-1 (with an error print) if no
 * table entry matches.
 */
static u32 calculate_ddrconfig(struct rk3328_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;

	if (sdram_params->base.dramtype == DDR4) {
		/* dual rank, both ranks the same size: prefer entries 17..20 */
		if (cs == 2 && row == cs1_row) {
			/* key: row bits above 13, rank flag, width, die width */
			tmp = ((row - 13) << 3) | (1 << 2) | (bw & 0x2) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0x7) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0x7)) &&
				    ((tmp & 0x3c) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x3c))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* general DDR4 entries 10..16; bit 6 encodes rank count - 1 */
		tmp = ((cs - 1) << 6) | ((row - 13) << 3) | (bw & 0x2) | die_bw;
		for (i = 10; i < 17; i++) {
			if (((tmp & 0x7) == (ddr4_cfg_2_rbc[i - 10] & 0x7)) &&
			    ((tmp & 0x3c) <= (ddr4_cfg_2_rbc[i - 10] & 0x3c)) &&
			    ((tmp & 0x40) <= (ddr4_cfg_2_rbc[i - 10] & 0x40))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* 4-bank parts map to the single fixed entry 8 */
		if (bank == 2) {
			ddrconf = 8;
			goto out;
		}

		/* dual rank, both ranks the same size: entries 5..7 */
		if (cs == 2 && row == cs1_row) {
			for (i = 5; i < 8; i++) {
				if ((bw + col - 11) == (ddr_cfg_2_rbc[i] &
							0x3)) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* general case, entries 0..4; key mixes row, rank, bw+col */
		tmp = ((row - 13) << 4) | (1 << 2) | ((bw + col - 11) << 0);
		for (i = 0; i < 5; i++)
			if (((tmp & 0xf) == (ddr_cfg_2_rbc[i] & 0xf)) &&
			    ((tmp & 0x30) <= (ddr_cfg_2_rbc[i] & 0x30))) {
				ddrconf = i;
				goto out;
			}
	}

out:
	if (ddrconf > 20)
		printf("calculate ddrconfig error\n");

	return ddrconf;
}
209
210
211
212
213
214
215static void set_ctl_address_map(struct dram_info *dram,
216 struct rk3328_sdram_params *sdram_params)
217{
218 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
219 void __iomem *pctl_base = dram->pctl;
220
221 sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
222 &addrmap[cap_info->ddrconfig][0], 9 * 4);
223 if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
224 setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
225 if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
226 setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
227
228 if (cap_info->rank == 1)
229 clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
230}
231
232static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
233{
234 void __iomem *pctl_base = dram->pctl;
235 u32 dis_auto_zq = 0;
236 u32 pwrctl;
237 u32 ret;
238
239
240 pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
241 writel(0, pctl_base + DDR_PCTL2_PWRCTL);
242
243 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
244
245 ret = phy_data_training(dram->phy, cs, dramtype);
246
247 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
248
249
250 writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
251
252 return ret;
253}
254
255static void rx_deskew_switch_adjust(struct dram_info *dram)
256{
257 u32 i, deskew_val;
258 u32 gate_val = 0;
259 void __iomem *phy_base = dram->phy;
260
261 for (i = 0; i < 4; i++)
262 gate_val = MAX(readl(PHY_REG(phy_base, 0xfb + i)), gate_val);
263
264 deskew_val = (gate_val >> 3) + 1;
265 deskew_val = (deskew_val > 0x1f) ? 0x1f : deskew_val;
266 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xc, (deskew_val & 0x3) << 2);
267 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x7 << 4,
268 (deskew_val & 0x1c) << 2);
269}
270
/* Set the TX deskew switch field: PHY reg 0x6e[1:0] = 1. */
static void tx_deskew_switch_adjust(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0x3, 1);
}
277
/* Select the address-mapping profile in the memory scheduler. */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->ddrconf);
}
282
/*
 * Program the memory scheduler (NoC) timing registers.
 * The single agingx0 value is replicated across all six aging registers.
 */
static void sdram_msch_config(struct msch_regs *msch,
			      struct sdram_msch_timings *noc_timings)
{
	writel(noc_timings->ddrtiming.d32, &msch->ddrtiming);

	writel(noc_timings->ddrmode.d32, &msch->ddrmode);
	writel(noc_timings->readlatency, &msch->readlatency);

	writel(noc_timings->activate.d32, &msch->activate);
	writel(noc_timings->devtodev.d32, &msch->devtodev);
	writel(noc_timings->ddr4timing.d32, &msch->ddr4_timing);
	writel(noc_timings->agingx0, &msch->aging0);
	writel(noc_timings->agingx0, &msch->aging1);
	writel(noc_timings->agingx0, &msch->aging2);
	writel(noc_timings->agingx0, &msch->aging3);
	writel(noc_timings->agingx0, &msch->aging4);
	writel(noc_timings->agingx0, &msch->aging5);
}
301
/*
 * Apply the top-level DRAM layout: select the ddrconf profile in the
 * scheduler, encode the detected geometry into GRF os_reg[2]/os_reg[3]
 * (read back by later boot stages, see rk3328_dmc_probe), and program
 * the NoC timings from the (possibly patched) sdram_ch copy.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rk3328_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->grf->os_reg[2]);
	writel(sys_reg3, &dram->grf->os_reg[3]);

	sdram_msch_config(dram->msch, &sdram_ch.noc_timings);
}
317
318static void enable_low_power(struct dram_info *dram,
319 struct rk3328_sdram_params *sdram_params)
320{
321 void __iomem *pctl_base = dram->pctl;
322
323
324 writel(0x00800000, &dram->ddr_grf->ddr_grf_con[0]);
325 writel(0x20012001, &dram->ddr_grf->ddr_grf_con[2]);
326
327 writel(0x001e001a, &dram->ddr_grf->ddr_grf_con[2]);
328
329 if (PD_IDLE == 0)
330 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
331 else
332 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
333 if (SR_IDLE == 0)
334 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
335 else
336 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
337 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
338}
339
/*
 * Full controller/PHY bring-up sequence.
 *
 * @pre_init: 1 on the first (probing) pass, 0 on the final pass; the RX/TX
 *            deskew adjustments are only applied on the final pass.
 *
 * The reset release order is staged: all resets asserted, PHY APB reset
 * released (so the DPLL/PHY can be configured), PHY reset released, then
 * the controller APB reset, and finally everything — at which point the
 * controller is polled out of its init state and data training runs.
 *
 * Return: 0 on success, -1 if data training fails on either rank.
 */
static int sdram_init(struct dram_info *dram,
		      struct rk3328_sdram_params *sdram_params, u32 pre_init)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;

	/* assert all controller/PHY resets */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	/* release the PHY APB reset and set the DDR clock */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release the PHY core reset and soft-reset the PHY */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram->phy);

	/* release the controller APB reset; program controller and PHY */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs, SR_IDLE, PD_IDLE);
	cap_info->ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram->phy, &sdram_params->phy_regs, &sdram_params->skew,
		&sdram_params->base, cap_info->bw);

	/* set DFIMISC[5:4], release the last reset, wait until STAT != 0 */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	/* train both chip-selects; either failure aborts the init */
	if (data_training(dram, 0, sdram_params->base.dramtype) != 0) {
		printf("data training error\n");
		return -1;
	}
	if (data_training(dram, 1, sdram_params->base.dramtype) != 0) {
		printf("data training error\n");
		return -1;
	}

	/* DDR4: calibrate VrefDQ on both ranks */
	if (sdram_params->base.dramtype == DDR4)
		pctl_write_vrefdq(dram->pctl, 0x3, 5670,
				  sdram_params->base.dramtype);

	/* deskew tuning only on the final pass */
	if (pre_init == 0) {
		rx_deskew_switch_adjust(dram);
		tx_deskew_switch_adjust(dram);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
399
/*
 * Detect the DRAM geometry (col/bank/row/rank/width).
 *
 * For non-DDR4 parts, column/bank/die-width are detected via the shared
 * sdram_detect_* helpers using generous upper bounds; for DDR4 the
 * column (10) and bank (code 2) are fixed and only the bank-group count
 * is probed.  Rank 1 presence is inferred from whether data training on
 * CS1 succeeds.  Results are written into cap_info.
 *
 * Return: 0 on success, -1 on detection failure.
 * NOTE(review): the return type is u64, so -1 becomes a huge unsigned
 * value, and the caller (sdram_init_detect) ignores it entirely —
 * consider an int return that the caller checks.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rk3328_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;

	u32 bk, bktmp;
	u32 col, coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;

	if (dram_type != DDR4) {
		/* upper bounds for the detection helpers */
		coltmp = 12;
		bktmp = 3;
		rowtmp = 16;

		if (sdram_detect_col(cap_info, coltmp) != 0)
			goto cap_err;
		sdram_detect_bank(cap_info, coltmp, bktmp);
		sdram_detect_dbw(cap_info, dram_type);
	} else {
		/* DDR4: fixed column/bank codes, probe bank groups only */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		cap_info->col = col;
		cap_info->bk = bk;
		sdram_detect_bg(cap_info, coltmp);
	}

	/* detect the row count (cs0_row) */
	if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
		goto cap_err;

	/* detect 3/4-density row parts */
	sdram_detect_row_3_4(cap_info, coltmp, bktmp);

	/* rank 1 exists iff training on CS1 succeeds */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	/* bus width code fixed at 2 here — presumably full width; confirm */
	bw = 2;
	cap_info->bw = bw;

	/* both ranks are assumed to share the cs0 row geometry */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
472
/*
 * Two-pass DRAM initialization.
 *
 * Pass 1 initializes with the DT-provided defaults and probes the real
 * geometry; the controller parameters and the NoC bandwidth ratio are
 * then patched with the detected values and pass 2 re-initializes.
 * Finally the CS1 row count is encoded back into GRF os_reg[2]/[3] so
 * later boot stages can compute the RAM size.
 *
 * Return: always 0.
 * NOTE(review): the results of sdram_init() and dram_detect_cap() are
 * ignored here — verify whether failures should propagate.
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rk3328_sdram_params *sdram_params)
{
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;

	debug("Starting SDRAM initialization...\n");

	memcpy(&sdram_ch, &sdram_params->ch,
	       sizeof(struct rk3328_sdram_channel));

	/* pass 1: init with defaults, then probe the actual capacity */
	sdram_init(dram, sdram_params, 1);
	dram_detect_cap(dram, sdram_params, 0);

	/* patch the controller register set for the detected geometry */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);

	/* bwratio: 0 when the detected width code is 2, else 1 */
	if (cap_info->bw == 2)
		sdram_ch.noc_timings.ddrtiming.b.bwratio = 0;
	else
		sdram_ch.noc_timings.ddrtiming.b.bwratio = 1;

	/* pass 2: re-init with the final parameters */
	sdram_init(dram, sdram_params, 0);

	/* record the cs1 row count in the os_regs for later stages */
	sdram_detect_cs1_row(cap_info, sdram_params->base.dramtype);
	if (cap_info->cs1_row) {
		sys_reg = readl(&dram->grf->os_reg[2]);
		sys_reg3 = readl(&dram->grf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->grf->os_reg[2]);
		writel(sys_reg3, &dram->grf->os_reg[3]);
	}

	sdram_print_ddr_info(&sdram_params->ch.cap_info, &sdram_params->base);

	return 0;
}
515
516static int rk3328_dmc_init(struct udevice *dev)
517{
518 struct dram_info *priv = dev_get_priv(dev);
519 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
520 int ret;
521
522#if !CONFIG_IS_ENABLED(OF_PLATDATA)
523 struct rk3328_sdram_params *params = &plat->sdram_params;
524#else
525 struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
526 struct rk3328_sdram_params *params =
527 (void *)dtplat->rockchip_sdram_params;
528
529 ret = conv_of_platdata(dev);
530 if (ret)
531 return ret;
532#endif
533 priv->phy = regmap_get_range(plat->map, 0);
534 priv->pctl = regmap_get_range(plat->map, 1);
535 priv->grf = regmap_get_range(plat->map, 2);
536 priv->cru = regmap_get_range(plat->map, 3);
537 priv->msch = regmap_get_range(plat->map, 4);
538 priv->ddr_grf = regmap_get_range(plat->map, 5);
539
540 debug("%s phy %p pctrl %p grf %p cru %p msch %p ddr_grf %p\n",
541 __func__, priv->phy, priv->pctl, priv->grf, priv->cru,
542 priv->msch, priv->ddr_grf);
543 ret = sdram_init_detect(priv, params);
544 if (ret < 0) {
545 printf("%s DRAM init failed%d\n", __func__, ret);
546 return ret;
547 }
548
549 return 0;
550}
551
552static int rk3328_dmc_ofdata_to_platdata(struct udevice *dev)
553{
554#if !CONFIG_IS_ENABLED(OF_PLATDATA)
555 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
556 int ret;
557
558 ret = dev_read_u32_array(dev, "rockchip,sdram-params",
559 (u32 *)&plat->sdram_params,
560 sizeof(plat->sdram_params) / sizeof(u32));
561 if (ret) {
562 printf("%s: Cannot read rockchip,sdram-params %d\n",
563 __func__, ret);
564 return ret;
565 }
566 ret = regmap_init_mem(dev, &plat->map);
567 if (ret)
568 printf("%s: regmap failed %d\n", __func__, ret);
569#endif
570 return 0;
571}
572
573#endif
574
/*
 * Probe: in TPL, run the full DRAM init; in later stages, just read the
 * size that TPL encoded into GRF os_reg[2].
 */
static int rk3328_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_TPL_BUILD
	/*
	 * NOTE(review): an init failure still returns 0 here, so probe
	 * reports success either way — confirm whether boot should halt
	 * on DRAM init failure instead.
	 */
	if (rk3328_dmc_init(dev))
		return 0;
#else
	struct dram_info *priv = dev_get_priv(dev);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	debug("%s: grf=%p\n", __func__, priv->grf);
	/* decode base/size from the geometry TPL stored in os_reg[2] */
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = rockchip_sdram_size(
			(phys_addr_t)&priv->grf->os_reg[2]);
#endif
	return 0;
}
591
/* RAM uclass op: report the base/size captured at probe time. */
static int rk3328_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}
600
/* RAM uclass operations. */
static struct ram_ops rk3328_dmc_ops = {
	.get_info = rk3328_dmc_get_info,
};
604
/* Device tree match table. */
static const struct udevice_id rk3328_dmc_ids[] = {
	{ .compatible = "rockchip,rk3328-dmc" },
	{ }
};
609
U_BOOT_DRIVER(dmc_rk3328) = {
	.name = "rockchip_rk3328_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3328_dmc_ids,
	.ops = &rk3328_dmc_ops,
#ifdef CONFIG_TPL_BUILD
	/* DT parsing is only needed in TPL, where the init runs */
	.ofdata_to_platdata = rk3328_dmc_ofdata_to_platdata,
#endif
	.probe = rk3328_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_TPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
#endif
};
624