// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015, The Linux Foundation. All rights reserved.
 */

#include <linux/clk.h>
#include <linux/clk-provider.h>

#include "dsi_phy.h"
#include "dsi.xml.h"
#include "dsi_phy_28nm.xml.h"
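
/*
 * DSI PLL 28nm - clock hierarchy (eg: DSI0), as registered in
 * pll_28nm_register() below:
 *
 *   dsi0vco_clk ---> POSTDIV1 divider ---> dsi0analog_postdiv_clk
 *   dsi0analog_postdiv_clk ---> fixed /2 ---> dsi0indirect_path_div2_clk
 *   dsi0vco_clk ---> POSTDIV3 divider ---> dsi0pll            (pixel clock)
 *   dsi0byte_mux selects dsi0vco_clk or dsi0indirect_path_div2_clk
 *   dsi0byte_mux ---> fixed /4 ---> dsi0pllbyte               (byte clock)
 */
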
#define POLL_MAX_READS		10
#define POLL_TIMEOUT_US		50

#define VCO_REF_CLK_RATE	19200000
#define VCO_MIN_RATE		350000000
#define VCO_MAX_RATE		750000000

/* v2.0.0 28nm LP implementation */
#define DSI_PHY_28NM_QUIRK_PHY_LP	BIT(0)

#define LPFR_LUT_SIZE			10
struct lpfr_cfg {
	unsigned long vco_rate;
	u32 resistance;
};

/* Loop filter resistance, indexed by the target VCO rate: */
static const struct lpfr_cfg lpfr_lut[LPFR_LUT_SIZE] = {
	{ 479500000,  8 },
	{ 480000000, 11 },
	{ 575500000,  8 },
	{ 576000000, 12 },
	{ 610500000,  8 },
	{ 659500000,  9 },
	{ 671500000, 10 },
	{ 672000000, 14 },
	{ 708500000, 10 },
	{ 750000000, 11 },
};

struct pll_28nm_cached_state {
	unsigned long vco_rate;
	u8 postdiv3;
	u8 postdiv1;
	u8 byte_mux;
};

struct dsi_pll_28nm {
	struct clk_hw clk_hw;

	struct msm_dsi_phy *phy;

	struct pll_28nm_cached_state cached_state;
};

#define to_pll_28nm(x)	container_of(x, struct dsi_pll_28nm, clk_hw)

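/* Poll the PLL status register until the ready bit is set or retries run out */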
static bool pll_28nm_poll_for_ready(struct dsi_pll_28nm *pll_28nm,
				    u32 nb_tries, u32 timeout_us)
{
	bool pll_locked = false;
	u32 val;

	while (nb_tries--) {
		val = dsi_phy_read(pll_28nm->phy->pll_base + REG_DSI_28nm_PHY_PLL_STATUS);
		pll_locked = !!(val & DSI_28nm_PHY_PLL_STATUS_PLL_RDY);

		if (pll_locked)
			break;

		udelay(timeout_us);
	}
	DBG("DSI PLL is %slocked", pll_locked ? "" : "*not* ");

	return pll_locked;
}

static void pll_28nm_software_reset(struct dsi_pll_28nm *pll_28nm)
{
	void __iomem *base = pll_28nm->phy->pll_base;

	/*
	 * Add HW recommended delays after toggling the software
	 * reset bit off and back on.
	 */
	dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_TEST_CFG,
			     DSI_28nm_PHY_PLL_TEST_CFG_PLL_SW_RESET, 1);
	dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_TEST_CFG, 0x00, 1);
}
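
/*
 * Clock Callbacks
 */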
static int dsi_pll_28nm_clk_set_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long parent_rate)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
	struct device *dev = &pll_28nm->phy->pdev->dev;
	void __iomem *base = pll_28nm->phy->pll_base;
	unsigned long div_fbx1000, gen_vco_clk;
	u32 refclk_cfg, frac_n_mode, frac_n_value;
	u32 sdm_cfg0, sdm_cfg1, sdm_cfg2, sdm_cfg3;
	u32 cal_cfg10, cal_cfg11;
	u32 rem;
	int i;

	VERB("rate=%lu, parent's=%lu", rate, parent_rate);

	/* Force postdiv2 to be div-4 */
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_POSTDIV2_CFG, 3);

	/* Configure the Loop filter resistance */
	for (i = 0; i < LPFR_LUT_SIZE; i++)
		if (rate <= lpfr_lut[i].vco_rate)
			break;
	if (i == LPFR_LUT_SIZE) {
		DRM_DEV_ERROR(dev, "unable to get loop filter resistance. vco=%lu\n",
			      rate);
		return -EINVAL;
	}
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_LPFR_CFG, lpfr_lut[i].resistance);

	/* Loop filter capacitance values : c1 and c2 */
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_LPFC1_CFG, 0x70);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_LPFC2_CFG, 0x15);

	rem = rate % VCO_REF_CLK_RATE;
	if (rem) {
		refclk_cfg = DSI_28nm_PHY_PLL_REFCLK_CFG_DBLR;
		frac_n_mode = 1;
		div_fbx1000 = rate / (VCO_REF_CLK_RATE / 500);
		gen_vco_clk = div_fbx1000 * (VCO_REF_CLK_RATE / 500);
	} else {
		refclk_cfg = 0x0;
		frac_n_mode = 0;
		div_fbx1000 = rate / (VCO_REF_CLK_RATE / 1000);
		gen_vco_clk = div_fbx1000 * (VCO_REF_CLK_RATE / 1000);
	}

	DBG("refclk_cfg = %d", refclk_cfg);

	rem = div_fbx1000 % 1000;
	frac_n_value = (rem << 16) / 1000;

	DBG("div_fb = %lu", div_fbx1000);
	DBG("frac_n_value = %d", frac_n_value);

	DBG("Generated VCO Clock: %lu", gen_vco_clk);
	rem = 0;
	sdm_cfg1 = dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_SDM_CFG1);
	sdm_cfg1 &= ~DSI_28nm_PHY_PLL_SDM_CFG1_DC_OFFSET__MASK;
	if (frac_n_mode) {
		sdm_cfg0 = 0x0;
		sdm_cfg0 |= DSI_28nm_PHY_PLL_SDM_CFG0_BYP_DIV(0);
		sdm_cfg1 |= DSI_28nm_PHY_PLL_SDM_CFG1_DC_OFFSET(
				(u32)(((div_fbx1000 / 1000) & 0x3f) - 1));
		sdm_cfg3 = frac_n_value >> 8;
		sdm_cfg2 = frac_n_value & 0xff;
	} else {
		sdm_cfg0 = DSI_28nm_PHY_PLL_SDM_CFG0_BYP;
		sdm_cfg0 |= DSI_28nm_PHY_PLL_SDM_CFG0_BYP_DIV(
				(u32)(((div_fbx1000 / 1000) & 0x3f) - 1));
		sdm_cfg1 |= DSI_28nm_PHY_PLL_SDM_CFG1_DC_OFFSET(0);
		sdm_cfg2 = 0;
		sdm_cfg3 = 0;
	}

	DBG("sdm_cfg0=%d", sdm_cfg0);
	DBG("sdm_cfg1=%d", sdm_cfg1);
	DBG("sdm_cfg2=%d", sdm_cfg2);
	DBG("sdm_cfg3=%d", sdm_cfg3);

	cal_cfg11 = (u32)(gen_vco_clk / (256 * 1000000));
	cal_cfg10 = (u32)((gen_vco_clk % (256 * 1000000)) / 1000000);
	DBG("cal_cfg10=%d, cal_cfg11=%d", cal_cfg10, cal_cfg11);

	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_CHGPUMP_CFG, 0x02);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_CAL_CFG3, 0x2b);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_CAL_CFG4, 0x06);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2, 0x0d);

	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_SDM_CFG1, sdm_cfg1);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_SDM_CFG2,
		      DSI_28nm_PHY_PLL_SDM_CFG2_FREQ_SEED_7_0(sdm_cfg2));
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_SDM_CFG3,
		      DSI_28nm_PHY_PLL_SDM_CFG3_FREQ_SEED_15_8(sdm_cfg3));
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_SDM_CFG4, 0x00);

	/* Add hardware recommended delay for correct PLL configuration */
	if (pll_28nm->phy->cfg->quirks & DSI_PHY_28NM_QUIRK_PHY_LP)
		udelay(1000);
	else
		udelay(1);

	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_REFCLK_CFG, refclk_cfg);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_PWRGEN_CFG, 0x00);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_VCOLPF_CFG, 0x31);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_SDM_CFG0, sdm_cfg0);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_CAL_CFG0, 0x12);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_CAL_CFG6, 0x30);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_CAL_CFG7, 0x00);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_CAL_CFG8, 0x60);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_CAL_CFG9, 0x00);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_CAL_CFG10, cal_cfg10 & 0xff);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_CAL_CFG11, cal_cfg11 & 0xff);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_EFUSE_CFG, 0x20);

	return 0;
}

static int dsi_pll_28nm_clk_is_enabled(struct clk_hw *hw)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);

	return pll_28nm_poll_for_ready(pll_28nm, POLL_MAX_READS,
				       POLL_TIMEOUT_US);
}

static unsigned long dsi_pll_28nm_clk_recalc_rate(struct clk_hw *hw,
						  unsigned long parent_rate)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
	void __iomem *base = pll_28nm->phy->pll_base;
	u32 sdm0, doubler, sdm_byp_div;
	u32 sdm_dc_off, sdm_freq_seed, sdm2, sdm3;
	u32 ref_clk = VCO_REF_CLK_RATE;
	unsigned long vco_rate;

	VERB("parent_rate=%lu", parent_rate);

	/* Check to see if the ref clk doubler is enabled */
	doubler = dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_REFCLK_CFG) &
			DSI_28nm_PHY_PLL_REFCLK_CFG_DBLR;
	ref_clk += (doubler * VCO_REF_CLK_RATE);

	/* see if it is integer mode or sdm mode */
	sdm0 = dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_SDM_CFG0);
	if (sdm0 & DSI_28nm_PHY_PLL_SDM_CFG0_BYP) {
		/* integer mode */
		sdm_byp_div = FIELD(
				dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_SDM_CFG0),
				DSI_28nm_PHY_PLL_SDM_CFG0_BYP_DIV) + 1;
		vco_rate = ref_clk * sdm_byp_div;
	} else {
		/* sdm mode */
		sdm_dc_off = FIELD(
				dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_SDM_CFG1),
				DSI_28nm_PHY_PLL_SDM_CFG1_DC_OFFSET);
		DBG("sdm_dc_off = %d", sdm_dc_off);
		sdm2 = FIELD(dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_SDM_CFG2),
			     DSI_28nm_PHY_PLL_SDM_CFG2_FREQ_SEED_7_0);
		sdm3 = FIELD(dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_SDM_CFG3),
			     DSI_28nm_PHY_PLL_SDM_CFG3_FREQ_SEED_15_8);
		sdm_freq_seed = (sdm3 << 8) | sdm2;
		DBG("sdm_freq_seed = %d", sdm_freq_seed);

		vco_rate = (ref_clk * (sdm_dc_off + 1)) +
			   mult_frac(ref_clk, sdm_freq_seed, BIT(16));
		DBG("vco rate = %lu", vco_rate);
	}

	DBG("returning vco rate = %lu", vco_rate);

	return vco_rate;
}

static int _dsi_pll_28nm_vco_prepare_hpm(struct dsi_pll_28nm *pll_28nm)
{
	struct device *dev = &pll_28nm->phy->pdev->dev;
	void __iomem *base = pll_28nm->phy->pll_base;
	u32 max_reads = 5, timeout_us = 100;
	bool locked;
	u32 val;
	int i;

	DBG("id=%d", pll_28nm->phy->id);

	pll_28nm_software_reset(pll_28nm);

	/*
	 * PLL power up sequence.
	 * Add necessary delays recommended by hardware.
	 */
	val = DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRDN_B;
	dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 1);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRGEN_PWRDN_B;
	dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 200);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B;
	dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 500);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_ENABLE;
	dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 600);

	for (i = 0; i < 2; i++) {
		/* DSI Uniphy lock detect setting */
		dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2,
				     0x0c, 100);
		dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2, 0x0d);

		/* poll for PLL ready status */
		locked = pll_28nm_poll_for_ready(pll_28nm,
						 max_reads, timeout_us);
		if (locked)
			break;

		pll_28nm_software_reset(pll_28nm);

		/*
		 * PLL power up sequence.
		 * Add necessary delays recommended by hardware.
		 */
		val = DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRDN_B;
		dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 1);

		val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRGEN_PWRDN_B;
		dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 200);

		val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B;
		dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 250);

		val &= ~DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B;
		dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 200);

		val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B;
		dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 500);

		val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_ENABLE;
		dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 600);
	}

	if (unlikely(!locked))
		DRM_DEV_ERROR(dev, "DSI PLL lock failed\n");
	else
		DBG("DSI PLL Lock success");

	return locked ? 0 : -EINVAL;
}

static int dsi_pll_28nm_vco_prepare_hpm(struct clk_hw *hw)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
	int i, ret;

	if (unlikely(pll_28nm->phy->pll_on))
		return 0;

	for (i = 0; i < 3; i++) {
		ret = _dsi_pll_28nm_vco_prepare_hpm(pll_28nm);
		if (!ret) {
			pll_28nm->phy->pll_on = true;
			return 0;
		}
	}

	return ret;
}

static int dsi_pll_28nm_vco_prepare_lp(struct clk_hw *hw)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
	struct device *dev = &pll_28nm->phy->pdev->dev;
	void __iomem *base = pll_28nm->phy->pll_base;
	bool locked;
	u32 max_reads = 10, timeout_us = 50;
	u32 val;

	DBG("id=%d", pll_28nm->phy->id);

	if (unlikely(pll_28nm->phy->pll_on))
		return 0;

	pll_28nm_software_reset(pll_28nm);

	/*
	 * PLL power up sequence.
	 * Add necessary delays recommended by hardware.
	 */
	dsi_phy_write_ndelay(base + REG_DSI_28nm_PHY_PLL_CAL_CFG1, 0x34, 500);

	val = DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRDN_B;
	dsi_phy_write_ndelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 500);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRGEN_PWRDN_B;
	dsi_phy_write_ndelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 500);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B |
	       DSI_28nm_PHY_PLL_GLB_CFG_PLL_ENABLE;
	dsi_phy_write_ndelay(base + REG_DSI_28nm_PHY_PLL_GLB_CFG, val, 500);

	/* DSI PLL toggle lock detect setting */
	dsi_phy_write_ndelay(base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2, 0x04, 500);
	dsi_phy_write_udelay(base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2, 0x05, 512);

	locked = pll_28nm_poll_for_ready(pll_28nm, max_reads, timeout_us);

	if (unlikely(!locked)) {
		DRM_DEV_ERROR(dev, "DSI PLL lock failed\n");
		return -EINVAL;
	}

	DBG("DSI PLL lock success");
	pll_28nm->phy->pll_on = true;

	return 0;
}

static void dsi_pll_28nm_vco_unprepare(struct clk_hw *hw)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);

	DBG("id=%d", pll_28nm->phy->id);

	if (unlikely(!pll_28nm->phy->pll_on))
		return;

	dsi_phy_write(pll_28nm->phy->pll_base + REG_DSI_28nm_PHY_PLL_GLB_CFG, 0x00);

	pll_28nm->phy->pll_on = false;
}

static long dsi_pll_28nm_clk_round_rate(struct clk_hw *hw,
		unsigned long rate, unsigned long *parent_rate)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);

	if (rate < pll_28nm->phy->cfg->min_pll_rate)
		return pll_28nm->phy->cfg->min_pll_rate;
	else if (rate > pll_28nm->phy->cfg->max_pll_rate)
		return pll_28nm->phy->cfg->max_pll_rate;
	else
		return rate;
}

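/* The HPM and LP PHY variants share all VCO ops except the prepare sequence */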
static const struct clk_ops clk_ops_dsi_pll_28nm_vco_hpm = {
	.round_rate = dsi_pll_28nm_clk_round_rate,
	.set_rate = dsi_pll_28nm_clk_set_rate,
	.recalc_rate = dsi_pll_28nm_clk_recalc_rate,
	.prepare = dsi_pll_28nm_vco_prepare_hpm,
	.unprepare = dsi_pll_28nm_vco_unprepare,
	.is_enabled = dsi_pll_28nm_clk_is_enabled,
};

static const struct clk_ops clk_ops_dsi_pll_28nm_vco_lp = {
	.round_rate = dsi_pll_28nm_clk_round_rate,
	.set_rate = dsi_pll_28nm_clk_set_rate,
	.recalc_rate = dsi_pll_28nm_clk_recalc_rate,
	.prepare = dsi_pll_28nm_vco_prepare_lp,
	.unprepare = dsi_pll_28nm_vco_unprepare,
	.is_enabled = dsi_pll_28nm_clk_is_enabled,
};
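
/*
 * PLL Callbacks
 */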
static void dsi_28nm_pll_save_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(phy->vco_hw);
	struct pll_28nm_cached_state *cached_state = &pll_28nm->cached_state;
	void __iomem *base = pll_28nm->phy->pll_base;

	cached_state->postdiv3 =
			dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_POSTDIV3_CFG);
	cached_state->postdiv1 =
			dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_POSTDIV1_CFG);
	cached_state->byte_mux = dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_VREG_CFG);
	if (dsi_pll_28nm_clk_is_enabled(phy->vco_hw))
		cached_state->vco_rate = clk_hw_get_rate(phy->vco_hw);
	else
		cached_state->vco_rate = 0;
}

static int dsi_28nm_pll_restore_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(phy->vco_hw);
	struct pll_28nm_cached_state *cached_state = &pll_28nm->cached_state;
	void __iomem *base = pll_28nm->phy->pll_base;
	int ret;

	ret = dsi_pll_28nm_clk_set_rate(phy->vco_hw,
					cached_state->vco_rate, 0);
	if (ret) {
		DRM_DEV_ERROR(&pll_28nm->phy->pdev->dev,
			      "restore vco rate failed. ret=%d\n", ret);
		return ret;
	}

	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_POSTDIV3_CFG,
		      cached_state->postdiv3);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_POSTDIV1_CFG,
		      cached_state->postdiv1);
	dsi_phy_write(base + REG_DSI_28nm_PHY_PLL_VREG_CFG,
		      cached_state->byte_mux);

	return 0;
}

static int pll_28nm_register(struct dsi_pll_28nm *pll_28nm, struct clk_hw **provided_clocks)
{
	char clk_name[32], parent1[32], parent2[32], vco_name[32];
	struct clk_init_data vco_init = {
		.parent_names = (const char *[]){ "xo" },
		.num_parents = 1,
		.name = vco_name,
		.flags = CLK_IGNORE_UNUSED,
	};
	struct device *dev = &pll_28nm->phy->pdev->dev;
	struct clk_hw *hw;
	int ret;

	DBG("%d", pll_28nm->phy->id);

	if (pll_28nm->phy->cfg->quirks & DSI_PHY_28NM_QUIRK_PHY_LP)
		vco_init.ops = &clk_ops_dsi_pll_28nm_vco_lp;
	else
		vco_init.ops = &clk_ops_dsi_pll_28nm_vco_hpm;

	snprintf(vco_name, 32, "dsi%dvco_clk", pll_28nm->phy->id);
	pll_28nm->clk_hw.init = &vco_init;
	ret = devm_clk_hw_register(dev, &pll_28nm->clk_hw);
	if (ret)
		return ret;

	snprintf(clk_name, 32, "dsi%danalog_postdiv_clk", pll_28nm->phy->id);
	snprintf(parent1, 32, "dsi%dvco_clk", pll_28nm->phy->id);
	hw = devm_clk_hw_register_divider(dev, clk_name,
					  parent1, CLK_SET_RATE_PARENT,
					  pll_28nm->phy->pll_base +
					  REG_DSI_28nm_PHY_PLL_POSTDIV1_CFG,
					  0, 4, 0, NULL);
	if (IS_ERR(hw))
		return PTR_ERR(hw);

	snprintf(clk_name, 32, "dsi%dindirect_path_div2_clk", pll_28nm->phy->id);
	snprintf(parent1, 32, "dsi%danalog_postdiv_clk", pll_28nm->phy->id);
	hw = devm_clk_hw_register_fixed_factor(dev, clk_name,
					       parent1, CLK_SET_RATE_PARENT,
					       1, 2);
	if (IS_ERR(hw))
		return PTR_ERR(hw);

	snprintf(clk_name, 32, "dsi%dpll", pll_28nm->phy->id);
	snprintf(parent1, 32, "dsi%dvco_clk", pll_28nm->phy->id);
	hw = devm_clk_hw_register_divider(dev, clk_name,
					  parent1, 0, pll_28nm->phy->pll_base +
					  REG_DSI_28nm_PHY_PLL_POSTDIV3_CFG,
					  0, 8, 0, NULL);
	if (IS_ERR(hw))
		return PTR_ERR(hw);
	provided_clocks[DSI_PIXEL_PLL_CLK] = hw;

	snprintf(clk_name, 32, "dsi%dbyte_mux", pll_28nm->phy->id);
	snprintf(parent1, 32, "dsi%dvco_clk", pll_28nm->phy->id);
	snprintf(parent2, 32, "dsi%dindirect_path_div2_clk", pll_28nm->phy->id);
	hw = devm_clk_hw_register_mux(dev, clk_name,
				      ((const char *[]){
						parent1, parent2
				      }), 2, CLK_SET_RATE_PARENT, pll_28nm->phy->pll_base +
				      REG_DSI_28nm_PHY_PLL_VREG_CFG, 1, 1, 0, NULL);
	if (IS_ERR(hw))
		return PTR_ERR(hw);

	snprintf(clk_name, 32, "dsi%dpllbyte", pll_28nm->phy->id);
	snprintf(parent1, 32, "dsi%dbyte_mux", pll_28nm->phy->id);
	hw = devm_clk_hw_register_fixed_factor(dev, clk_name,
					       parent1, CLK_SET_RATE_PARENT, 1, 4);
	if (IS_ERR(hw))
		return PTR_ERR(hw);
	provided_clocks[DSI_BYTE_PLL_CLK] = hw;

	return 0;
}

static int dsi_pll_28nm_init(struct msm_dsi_phy *phy)
{
	struct platform_device *pdev = phy->pdev;
	struct dsi_pll_28nm *pll_28nm;
	int ret;

	if (!pdev)
		return -ENODEV;

	pll_28nm = devm_kzalloc(&pdev->dev, sizeof(*pll_28nm), GFP_KERNEL);
	if (!pll_28nm)
		return -ENOMEM;

	pll_28nm->phy = phy;

	ret = pll_28nm_register(pll_28nm, phy->provided_clocks->hws);
	if (ret) {
		DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
		return ret;
	}

	phy->vco_hw = &pll_28nm->clk_hw;

	return 0;
}

static void dsi_28nm_dphy_set_timing(struct msm_dsi_phy *phy,
		struct msm_dsi_dphy_timing *timing)
{
	void __iomem *base = phy->base;

	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_0,
		      DSI_28nm_PHY_TIMING_CTRL_0_CLK_ZERO(timing->clk_zero));
	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_1,
		      DSI_28nm_PHY_TIMING_CTRL_1_CLK_TRAIL(timing->clk_trail));
	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_2,
		      DSI_28nm_PHY_TIMING_CTRL_2_CLK_PREPARE(timing->clk_prepare));
	if (timing->clk_zero & BIT(8))
		dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_3,
			      DSI_28nm_PHY_TIMING_CTRL_3_CLK_ZERO_8);
	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_4,
		      DSI_28nm_PHY_TIMING_CTRL_4_HS_EXIT(timing->hs_exit));
	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_5,
		      DSI_28nm_PHY_TIMING_CTRL_5_HS_ZERO(timing->hs_zero));
	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_6,
		      DSI_28nm_PHY_TIMING_CTRL_6_HS_PREPARE(timing->hs_prepare));
	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_7,
		      DSI_28nm_PHY_TIMING_CTRL_7_HS_TRAIL(timing->hs_trail));
	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_8,
		      DSI_28nm_PHY_TIMING_CTRL_8_HS_RQST(timing->hs_rqst));
	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_9,
		      DSI_28nm_PHY_TIMING_CTRL_9_TA_GO(timing->ta_go) |
		      DSI_28nm_PHY_TIMING_CTRL_9_TA_SURE(timing->ta_sure));
	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_10,
		      DSI_28nm_PHY_TIMING_CTRL_10_TA_GET(timing->ta_get));
	dsi_phy_write(base + REG_DSI_28nm_PHY_TIMING_CTRL_11,
		      DSI_28nm_PHY_TIMING_CTRL_11_TRIG3_CMD(0));
}

static void dsi_28nm_phy_regulator_enable_dcdc(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->reg_base;

	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_0, 0x0);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CAL_PWR_CFG, 1);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_5, 0);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_3, 0);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_2, 0x3);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_1, 0x9);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_0, 0x7);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_4, 0x20);
	dsi_phy_write(phy->base + REG_DSI_28nm_PHY_LDO_CNTRL, 0x00);
}

static void dsi_28nm_phy_regulator_enable_ldo(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->reg_base;

	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_0, 0x0);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CAL_PWR_CFG, 0);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_5, 0x7);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_3, 0);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_2, 0x1);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_1, 0x1);
	dsi_phy_write(base + REG_DSI_28nm_PHY_REGULATOR_CTRL_4, 0x20);

	if (phy->cfg->quirks & DSI_PHY_28NM_QUIRK_PHY_LP)
		dsi_phy_write(phy->base + REG_DSI_28nm_PHY_LDO_CNTRL, 0x05);
	else
		dsi_phy_write(phy->base + REG_DSI_28nm_PHY_LDO_CNTRL, 0x0d);
}

static void dsi_28nm_phy_regulator_ctrl(struct msm_dsi_phy *phy, bool enable)
{
	if (!enable) {
		dsi_phy_write(phy->reg_base +
			      REG_DSI_28nm_PHY_REGULATOR_CAL_PWR_CFG, 0);
		return;
	}

	if (phy->regulator_ldo_mode)
		dsi_28nm_phy_regulator_enable_ldo(phy);
	else
		dsi_28nm_phy_regulator_enable_dcdc(phy);
}

static int dsi_28nm_phy_enable(struct msm_dsi_phy *phy,
			       struct msm_dsi_phy_clk_request *clk_req)
{
	struct msm_dsi_dphy_timing *timing = &phy->timing;
	int i;
	void __iomem *base = phy->base;
	u32 val;

	DBG("");

	if (msm_dsi_dphy_timing_calc(timing, clk_req)) {
		DRM_DEV_ERROR(&phy->pdev->dev,
			      "%s: D-PHY timing calculation failed\n", __func__);
		return -EINVAL;
	}

	dsi_phy_write(base + REG_DSI_28nm_PHY_STRENGTH_0, 0xff);

	dsi_28nm_phy_regulator_ctrl(phy, true);

	dsi_28nm_dphy_set_timing(phy, timing);

	dsi_phy_write(base + REG_DSI_28nm_PHY_CTRL_1, 0x00);
	dsi_phy_write(base + REG_DSI_28nm_PHY_CTRL_0, 0x5f);

	dsi_phy_write(base + REG_DSI_28nm_PHY_STRENGTH_1, 0x6);

	for (i = 0; i < 4; i++) {
		dsi_phy_write(base + REG_DSI_28nm_PHY_LN_CFG_0(i), 0);
		dsi_phy_write(base + REG_DSI_28nm_PHY_LN_CFG_1(i), 0);
		dsi_phy_write(base + REG_DSI_28nm_PHY_LN_CFG_2(i), 0);
		dsi_phy_write(base + REG_DSI_28nm_PHY_LN_CFG_3(i), 0);
		dsi_phy_write(base + REG_DSI_28nm_PHY_LN_CFG_4(i), 0);
		dsi_phy_write(base + REG_DSI_28nm_PHY_LN_TEST_DATAPATH(i), 0);
		dsi_phy_write(base + REG_DSI_28nm_PHY_LN_DEBUG_SEL(i), 0);
		dsi_phy_write(base + REG_DSI_28nm_PHY_LN_TEST_STR_0(i), 0x1);
		dsi_phy_write(base + REG_DSI_28nm_PHY_LN_TEST_STR_1(i), 0x97);
	}

	dsi_phy_write(base + REG_DSI_28nm_PHY_LNCK_CFG_4, 0);
	dsi_phy_write(base + REG_DSI_28nm_PHY_LNCK_CFG_1, 0xc0);
	dsi_phy_write(base + REG_DSI_28nm_PHY_LNCK_TEST_STR0, 0x1);
	dsi_phy_write(base + REG_DSI_28nm_PHY_LNCK_TEST_STR1, 0xbb);

	dsi_phy_write(base + REG_DSI_28nm_PHY_CTRL_0, 0x5f);

	val = dsi_phy_read(base + REG_DSI_28nm_PHY_GLBL_TEST_CTRL);
	if (phy->id == DSI_1 && phy->usecase == MSM_DSI_PHY_SLAVE)
		val &= ~DSI_28nm_PHY_GLBL_TEST_CTRL_BITCLK_HS_SEL;
	else
		val |= DSI_28nm_PHY_GLBL_TEST_CTRL_BITCLK_HS_SEL;
	dsi_phy_write(base + REG_DSI_28nm_PHY_GLBL_TEST_CTRL, val);

	return 0;
}

static void dsi_28nm_phy_disable(struct msm_dsi_phy *phy)
{
	dsi_phy_write(phy->base + REG_DSI_28nm_PHY_CTRL_0, 0);
	dsi_28nm_phy_regulator_ctrl(phy, false);

	/*
	 * Wait for the registers writes to complete in order to
	 * ensure that the phy is completely disabled
	 */
	wmb();
}

const struct msm_dsi_phy_cfg dsi_phy_28nm_hpm_cfgs = {
	.has_phy_regulator = true,
	.reg_cfg = {
		.num = 1,
		.regs = {
			{"vddio", 100000, 100},
		},
	},
	.ops = {
		.enable = dsi_28nm_phy_enable,
		.disable = dsi_28nm_phy_disable,
		.pll_init = dsi_pll_28nm_init,
		.save_pll_state = dsi_28nm_pll_save_state,
		.restore_pll_state = dsi_28nm_pll_restore_state,
	},
	.min_pll_rate = VCO_MIN_RATE,
	.max_pll_rate = VCO_MAX_RATE,
	.io_start = { 0xfd922b00, 0xfd923100 },
	.num_dsi_phy = 2,
};

const struct msm_dsi_phy_cfg dsi_phy_28nm_hpm_famb_cfgs = {
	.has_phy_regulator = true,
	.reg_cfg = {
		.num = 1,
		.regs = {
			{"vddio", 100000, 100},
		},
	},
	.ops = {
		.enable = dsi_28nm_phy_enable,
		.disable = dsi_28nm_phy_disable,
		.pll_init = dsi_pll_28nm_init,
		.save_pll_state = dsi_28nm_pll_save_state,
		.restore_pll_state = dsi_28nm_pll_restore_state,
	},
	.min_pll_rate = VCO_MIN_RATE,
	.max_pll_rate = VCO_MAX_RATE,
	.io_start = { 0x1a94400, 0x1a96400 },
	.num_dsi_phy = 2,
};

const struct msm_dsi_phy_cfg dsi_phy_28nm_lp_cfgs = {
	.has_phy_regulator = true,
	.reg_cfg = {
		.num = 1,
		.regs = {
			{"vddio", 100000, 100},
		},
	},
	.ops = {
		.enable = dsi_28nm_phy_enable,
		.disable = dsi_28nm_phy_disable,
		.pll_init = dsi_pll_28nm_init,
		.save_pll_state = dsi_28nm_pll_save_state,
		.restore_pll_state = dsi_28nm_pll_restore_state,
	},
	.min_pll_rate = VCO_MIN_RATE,
	.max_pll_rate = VCO_MAX_RATE,
	.io_start = { 0x1a98500 },
	.num_dsi_phy = 1,
	.quirks = DSI_PHY_28NM_QUIRK_PHY_LP,
};