#include <subdev/clk.h>
#include "pll.h"

#include <core/device.h>
#include <subdev/timer.h>
#include <subdev/bios.h>
#include <subdev/bios/pll.h>

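/*
 * Per-domain state computed by gk104_clk_calc() and later written to the
 * hardware by gk104_clk_prog(): target frequency, source select, module
 * divider, divider source/control values and PLL coefficients.
 */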
struct gk104_clk_info {
	u32 freq;
	u32 ssel;
	u32 mdiv;
	u32 dsrc;
	u32 ddiv;
	u32 coef;
};

struct gk104_clk_priv {
	struct nvkm_clk base;
	struct gk104_clk_info eng[16];
};

static u32 read_div(struct gk104_clk_priv *, int, u32, u32);
static u32 read_pll(struct gk104_clk_priv *, u32);

static u32
read_vco(struct gk104_clk_priv *priv, u32 dsrc)
{
	u32 ssrc = nv_rd32(priv, dsrc);
	if (!(ssrc & 0x00000100))
		return read_pll(priv, 0x00e800);
	return read_pll(priv, 0x00e820);
}

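/*
 * Read back the current output frequency of a PLL.  M, N and P are decoded
 * from the coefficient register; the 0x132020 PLL additionally applies a
 * fractional-N value read from offset +0x10.  Returns 0 if the PLL is
 * disabled (control bit 0 clear).
 */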
static u32
read_pll(struct gk104_clk_priv *priv, u32 pll)
{
	u32 ctrl = nv_rd32(priv, pll + 0x00);
	u32 coef = nv_rd32(priv, pll + 0x04);
	u32 P = (coef & 0x003f0000) >> 16;
	u32 N = (coef & 0x0000ff00) >> 8;
	u32 M = (coef & 0x000000ff) >> 0;
	u32 sclk;
	u16 fN = 0xf000;

	if (!(ctrl & 0x00000001))
		return 0;

	switch (pll) {
	case 0x00e800:
	case 0x00e820:
		sclk = nv_device(priv)->crystal;
		P = 1;
		break;
	case 0x132000:
		sclk = read_pll(priv, 0x132020);
		P = (coef & 0x10000000) ? 2 : 1;
		break;
	case 0x132020:
		sclk = read_div(priv, 0, 0x137320, 0x137330);
		fN = nv_rd32(priv, pll + 0x10) >> 16;
		break;
	case 0x137000:
	case 0x137020:
	case 0x137040:
	case 0x1370e0:
		sclk = read_div(priv, (pll & 0xff) / 0x20, 0x137120, 0x137140);
		break;
	default:
		return 0;
	}

	if (P == 0)
		P = 1;

	sclk = (sclk * N) + (((u16)(fN + 4096) * sclk) >> 13);
	return sclk / (M * P);
}

static u32
read_div(struct gk104_clk_priv *priv, int doff, u32 dsrc, u32 dctl)
{
	u32 ssrc = nv_rd32(priv, dsrc + (doff * 4));
	u32 sctl = nv_rd32(priv, dctl + (doff * 4));

	switch (ssrc & 0x00000003) {
	case 0:
		if ((ssrc & 0x00030000) != 0x00030000)
			return nv_device(priv)->crystal;
		return 108000;
	case 2:
		return 100000;
	case 3:
		if (sctl & 0x80000000) {
			u32 sclk = read_vco(priv, dsrc + (doff * 4));
			u32 sdiv = (sctl & 0x0000003f) + 2;
			return (sclk * 2) / sdiv;
		}

		return read_vco(priv, dsrc + (doff * 4));
	default:
		return 0;
	}
}

static u32
read_mem(struct gk104_clk_priv *priv)
{
	switch (nv_rd32(priv, 0x1373f4) & 0x0000000f) {
	case 1: return read_pll(priv, 0x132020);
	case 2: return read_pll(priv, 0x132000);
	default:
		return 0;
	}
}

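/*
 * Read back the current frequency of clock domain 'clk'.  Domains 0-6 can
 * be driven either by their own PLL (0x137000 + clk * 0x20) or by the
 * divider path; higher domains use the divider path, optionally sourced
 * from the 0x1370e0 PLL.
 */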
static u32
read_clk(struct gk104_clk_priv *priv, int clk)
{
	u32 sctl = nv_rd32(priv, 0x137250 + (clk * 4));
	u32 sclk, sdiv;

	if (clk < 7) {
		u32 ssel = nv_rd32(priv, 0x137100);
		if (ssel & (1 << clk)) {
			sclk = read_pll(priv, 0x137000 + (clk * 0x20));
			sdiv = 1;
		} else {
			sclk = read_div(priv, clk, 0x137160, 0x1371d0);
			sdiv = 0;
		}
	} else {
		u32 ssrc = nv_rd32(priv, 0x137160 + (clk * 0x04));
		if ((ssrc & 0x00000003) == 0x00000003) {
			sclk = read_div(priv, clk, 0x137160, 0x1371d0);
			if (ssrc & 0x00000100) {
				if (ssrc & 0x40000000)
					sclk = read_pll(priv, 0x1370e0);
				sdiv = 1;
			} else {
				sdiv = 0;
			}
		} else {
			sclk = read_div(priv, clk, 0x137160, 0x1371d0);
			sdiv = 0;
		}
	}

	if (sctl & 0x80000000) {
		if (sdiv)
			sdiv = ((sctl & 0x00003f00) >> 8) + 2;
		else
			sdiv = ((sctl & 0x0000003f) >> 0) + 2;
		return (sclk * 2) / sdiv;
	}

	return sclk;
}

static int
gk104_clk_read(struct nvkm_clk *clk, enum nv_clk_src src)
{
	struct nvkm_device *device = nv_device(clk);
	struct gk104_clk_priv *priv = (void *)clk;

	switch (src) {
	case nv_clk_src_crystal:
		return device->crystal;
	case nv_clk_src_href:
		return 100000;
	case nv_clk_src_mem:
		return read_mem(priv);
	case nv_clk_src_gpc:
		return read_clk(priv, 0x00);
	case nv_clk_src_rop:
		return read_clk(priv, 0x01);
	case nv_clk_src_hubk07:
		return read_clk(priv, 0x02);
	case nv_clk_src_hubk06:
		return read_clk(priv, 0x07);
	case nv_clk_src_hubk01:
		return read_clk(priv, 0x08);
	case nv_clk_src_daemon:
		return read_clk(priv, 0x0c);
	case nv_clk_src_vdec:
		return read_clk(priv, 0x0e);
	default:
		nv_error(clk, "invalid clock source %d\n", src);
		return -EINVAL;
	}
}

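/*
 * Work out a half-step divider for reaching 'freq' from the reference
 * clock 'ref' (capped at 65, minimum 2).  The hardware encoding
 * (divider - 2) is returned through *ddiv, the resulting frequency as the
 * return value.
 */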
static u32
calc_div(struct gk104_clk_priv *priv, int clk, u32 ref, u32 freq, u32 *ddiv)
{
	u32 div = min((ref * 2) / freq, (u32)65);
	if (div < 2)
		div = 2;

	*ddiv = div - 2;
	return (ref * 2) / div;
}

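/*
 * Select a source for the divider path: one of the fixed reference
 * frequencies when the target matches one exactly, otherwise the VCO with
 * a divider calculated by calc_div().
 */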
static u32
calc_src(struct gk104_clk_priv *priv, int clk, u32 freq, u32 *dsrc, u32 *ddiv)
{
	u32 sclk;

	/* use one of the fixed frequencies if possible */
	*ddiv = 0x00000000;
	switch (freq) {
	case  27000:
	case 108000:
		*dsrc = 0x00000000;
		if (freq == 108000)
			*dsrc |= 0x00030000;
		return freq;
	case 100000:
		*dsrc = 0x00000002;
		return freq;
	default:
		*dsrc = 0x00000003;
		break;
	}

	/* otherwise, calculate the closest divider */
	sclk = read_vco(priv, 0x137160 + (clk * 4));
	if (clk < 7)
		sclk = calc_div(priv, clk, sclk, freq, ddiv);
	return sclk;
}

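/*
 * Work out PLL coefficients for the requested frequency, using the limits
 * parsed from the VBIOS PLL table for this domain's PLL.  Returns the
 * achievable frequency, or 0 on failure.
 */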
static u32
calc_pll(struct gk104_clk_priv *priv, int clk, u32 freq, u32 *coef)
{
	struct nvkm_bios *bios = nvkm_bios(priv);
	struct nvbios_pll limits;
	int N, M, P, ret;

	ret = nvbios_pll_parse(bios, 0x137000 + (clk * 0x20), &limits);
	if (ret)
		return 0;

	limits.refclk = read_div(priv, clk, 0x137120, 0x137140);
	if (!limits.refclk)
		return 0;

	ret = gt215_pll_calc(nv_subdev(priv), &limits, freq, &N, NULL, &M, &P);
	if (ret <= 0)
		return 0;

	*coef = (P << 16) | (N << 8) | M;
	return ret;
}

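/*
 * Calculate the settings for one clock domain: try the divider-only path
 * first, then the PLL path where the domain supports it, and keep whichever
 * lands closer to the requested frequency.
 */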
static int
calc_clk(struct gk104_clk_priv *priv,
	 struct nvkm_cstate *cstate, int clk, int dom)
{
	struct gk104_clk_info *info = &priv->eng[clk];
	u32 freq = cstate->domain[dom];
	u32 src0, div0, div1D, div1P = 0;
	u32 clk0, clk1 = 0;

	/* invalid clock domain */
	if (!freq)
		return 0;

	/* first possible path, using only dividers */
	clk0 = calc_src(priv, clk, freq, &src0, &div0);
	clk0 = calc_div(priv, clk, clk0, freq, &div1D);

	/* see if we can get any closer using PLLs */
	if (clk0 != freq && (0x0000ff87 & (1 << clk))) {
		if (clk <= 7)
			clk1 = calc_pll(priv, clk, freq, &info->coef);
		else
			clk1 = cstate->domain[nv_clk_src_hubk06];
		clk1 = calc_div(priv, clk, clk1, freq, &div1P);
	}

	/* select the method which gets closest to the target frequency */
	if (abs((int)freq - clk0) <= abs((int)freq - clk1)) {
		info->dsrc = src0;
		if (div0) {
			info->ddiv |= 0x80000000;
			info->ddiv |= div0;
		}
		if (div1D) {
			info->mdiv |= 0x80000000;
			info->mdiv |= div1D;
		}
		info->ssel = 0;
		info->freq = clk0;
	} else {
		if (div1P) {
			info->mdiv |= 0x80000000;
			info->mdiv |= div1P << 8;
		}
		info->ssel = (1 << clk);
		info->dsrc = 0x40000100;
		info->freq = clk1;
	}

	return 0;
}

static int
gk104_clk_calc(struct nvkm_clk *clk, struct nvkm_cstate *cstate)
{
	struct gk104_clk_priv *priv = (void *)clk;
	int ret;

	if ((ret = calc_clk(priv, cstate, 0x00, nv_clk_src_gpc)) ||
	    (ret = calc_clk(priv, cstate, 0x01, nv_clk_src_rop)) ||
	    (ret = calc_clk(priv, cstate, 0x02, nv_clk_src_hubk07)) ||
	    (ret = calc_clk(priv, cstate, 0x07, nv_clk_src_hubk06)) ||
	    (ret = calc_clk(priv, cstate, 0x08, nv_clk_src_hubk01)) ||
	    (ret = calc_clk(priv, cstate, 0x0c, nv_clk_src_daemon)) ||
	    (ret = calc_clk(priv, cstate, 0x0e, nv_clk_src_vdec)))
		return ret;

	return 0;
}

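/*
 * Programming is split into stages, applied across all domains in order:
 * set up the divider path (prog_0), deselect the PLL/alternate source
 * (prog_1_0/prog_1_1), reprogram the PLL and wait for lock (prog_2), set
 * the module divider (prog_3), then switch PLL-driven domains back to
 * their PLL (prog_4_0/prog_4_1).
 */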
static void
gk104_clk_prog_0(struct gk104_clk_priv *priv, int clk)
{
	struct gk104_clk_info *info = &priv->eng[clk];
	if (!info->ssel) {
		nv_mask(priv, 0x1371d0 + (clk * 0x04), 0x8000003f, info->ddiv);
		nv_wr32(priv, 0x137160 + (clk * 0x04), info->dsrc);
	}
}

static void
gk104_clk_prog_1_0(struct gk104_clk_priv *priv, int clk)
{
	nv_mask(priv, 0x137100, (1 << clk), 0x00000000);
	nv_wait(priv, 0x137100, (1 << clk), 0x00000000);
}

static void
gk104_clk_prog_1_1(struct gk104_clk_priv *priv, int clk)
{
	nv_mask(priv, 0x137160 + (clk * 0x04), 0x00000100, 0x00000000);
}

static void
gk104_clk_prog_2(struct gk104_clk_priv *priv, int clk)
{
	struct gk104_clk_info *info = &priv->eng[clk];
	const u32 addr = 0x137000 + (clk * 0x20);
	nv_mask(priv, addr + 0x00, 0x00000004, 0x00000000);
	nv_mask(priv, addr + 0x00, 0x00000001, 0x00000000);
	if (info->coef) {
		nv_wr32(priv, addr + 0x04, info->coef);
		nv_mask(priv, addr + 0x00, 0x00000001, 0x00000001);
		nv_wait(priv, addr + 0x00, 0x00020000, 0x00020000);
		nv_mask(priv, addr + 0x00, 0x00020004, 0x00000004);
	}
}

static void
gk104_clk_prog_3(struct gk104_clk_priv *priv, int clk)
{
	struct gk104_clk_info *info = &priv->eng[clk];
	if (info->ssel)
		nv_mask(priv, 0x137250 + (clk * 0x04), 0x00003f00, info->mdiv);
	else
		nv_mask(priv, 0x137250 + (clk * 0x04), 0x0000003f, info->mdiv);
}

static void
gk104_clk_prog_4_0(struct gk104_clk_priv *priv, int clk)
{
	struct gk104_clk_info *info = &priv->eng[clk];
	if (info->ssel) {
		nv_mask(priv, 0x137100, (1 << clk), info->ssel);
		nv_wait(priv, 0x137100, (1 << clk), info->ssel);
	}
}

static void
gk104_clk_prog_4_1(struct gk104_clk_priv *priv, int clk)
{
	struct gk104_clk_info *info = &priv->eng[clk];
	if (info->ssel) {
		nv_mask(priv, 0x137160 + (clk * 0x04), 0x40000000, 0x40000000);
		nv_mask(priv, 0x137160 + (clk * 0x04), 0x00000100, 0x00000100);
	}
}

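/*
 * Apply the calculated settings.  Each stage carries a mask of the clock
 * indices it applies to; domains with no frequency calculated are skipped.
 */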
static int
gk104_clk_prog(struct nvkm_clk *clk)
{
	struct gk104_clk_priv *priv = (void *)clk;
	struct {
		u32 mask;
		void (*exec)(struct gk104_clk_priv *, int);
	} stage[] = {
		{ 0x007f, gk104_clk_prog_0 },
		{ 0x007f, gk104_clk_prog_1_0 },
		{ 0xff80, gk104_clk_prog_1_1 },
		{ 0x00ff, gk104_clk_prog_2 },
		{ 0xff80, gk104_clk_prog_3 },
		{ 0x007f, gk104_clk_prog_4_0 },
		{ 0xff80, gk104_clk_prog_4_1 },
	};
	int i, j;

	for (i = 0; i < ARRAY_SIZE(stage); i++) {
		for (j = 0; j < ARRAY_SIZE(priv->eng); j++) {
			if (!(stage[i].mask & (1 << j)))
				continue;
			if (!priv->eng[j].freq)
				continue;
			stage[i].exec(priv, j);
		}
	}

	return 0;
}

static void
gk104_clk_tidy(struct nvkm_clk *clk)
{
	struct gk104_clk_priv *priv = (void *)clk;
	memset(priv->eng, 0x00, sizeof(priv->eng));
}

static struct nvkm_domain
gk104_domain[] = {
	{ nv_clk_src_crystal, 0xff },
	{ nv_clk_src_href   , 0xff },
	{ nv_clk_src_gpc    , 0x00, NVKM_CLK_DOM_FLAG_CORE, "core", 2000 },
	{ nv_clk_src_hubk07 , 0x01, NVKM_CLK_DOM_FLAG_CORE },
	{ nv_clk_src_rop    , 0x02, NVKM_CLK_DOM_FLAG_CORE },
	{ nv_clk_src_mem    , 0x03, 0, "memory", 500 },
	{ nv_clk_src_hubk06 , 0x04, NVKM_CLK_DOM_FLAG_CORE },
	{ nv_clk_src_hubk01 , 0x05 },
	{ nv_clk_src_vdec   , 0x06 },
	{ nv_clk_src_daemon , 0x07 },
	{ nv_clk_src_max }
};

static int
gk104_clk_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
	       struct nvkm_oclass *oclass, void *data, u32 size,
	       struct nvkm_object **pobject)
{
	struct gk104_clk_priv *priv;
	int ret;

	ret = nvkm_clk_create(parent, engine, oclass, gk104_domain,
			      NULL, 0, true, &priv);
	*pobject = nv_object(priv);
	if (ret)
		return ret;

	priv->base.read = gk104_clk_read;
	priv->base.calc = gk104_clk_calc;
	priv->base.prog = gk104_clk_prog;
	priv->base.tidy = gk104_clk_tidy;
	return 0;
}

struct nvkm_oclass
gk104_clk_oclass = {
	.handle = NV_SUBDEV(CLK, 0xe0),
	.ofuncs = &(struct nvkm_ofuncs) {
		.ctor = gk104_clk_ctor,
		.dtor = _nvkm_clk_dtor,
		.init = _nvkm_clk_init,
		.fini = _nvkm_clk_fini,
	},
};