#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/clkdev.h>
#include <linux/clk-provider.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <dt-bindings/clock/ath79-clk.h>

#include <asm/div64.h>

#include <asm/mach-ath79/ath79.h>
#include <asm/mach-ath79/ar71xx_regs.h>
#include "common.h"
#include "machtypes.h"

#define AR71XX_BASE_FREQ	40000000
#define AR724X_BASE_FREQ	40000000

static struct clk *clks[ATH79_CLK_END];
static struct clk_onecell_data clk_data = {
	.clks = clks,
	.clk_num = ARRAY_SIZE(clks),
};

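/*
 * Register a fixed-rate clock together with a clkdev lookup for it.
 * Failure is fatal here: without the basic system clocks the rest of
 * the platform cannot be brought up.
 */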
static struct clk *__init ath79_add_sys_clkdev(
	const char *id, unsigned long rate)
{
	struct clk *clk;
	int err;

	clk = clk_register_fixed_rate(NULL, id, NULL, 0, rate);
	if (IS_ERR(clk))
		panic("failed to allocate %s clock structure", id);

	err = clk_register_clkdev(clk, id, NULL);
	if (err)
		panic("unable to register %s clock device", id);

	return clk;
}

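/*
 * AR71xx: the CPU PLL multiplies the 40 MHz reference; the CPU and DDR
 * clocks divide the PLL output directly, while the AHB clock is derived
 * from the CPU clock with an extra divide-by-two stage.
 */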
static void __init ar71xx_clocks_init(void)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll;
	u32 freq;
	u32 div;

	ref_rate = AR71XX_BASE_FREQ;

	pll = ath79_pll_rr(AR71XX_PLL_REG_CPU_CONFIG);

	div = ((pll >> AR71XX_PLL_FB_SHIFT) & AR71XX_PLL_FB_MASK) + 1;
	freq = div * ref_rate;

	div = ((pll >> AR71XX_CPU_DIV_SHIFT) & AR71XX_CPU_DIV_MASK) + 1;
	cpu_rate = freq / div;

	div = ((pll >> AR71XX_DDR_DIV_SHIFT) & AR71XX_DDR_DIV_MASK) + 1;
	ddr_rate = freq / div;

	div = (((pll >> AR71XX_AHB_DIV_SHIFT) & AR71XX_AHB_DIV_MASK) + 1) * 2;
	ahb_rate = cpu_rate / div;

	ath79_add_sys_clkdev("ref", ref_rate);
	clks[ATH79_CLK_CPU] = ath79_add_sys_clkdev("cpu", cpu_rate);
	clks[ATH79_CLK_DDR] = ath79_add_sys_clkdev("ddr", ddr_rate);
	clks[ATH79_CLK_AHB] = ath79_add_sys_clkdev("ahb", ahb_rate);

	clk_add_alias("wdt", NULL, "ahb", NULL);
	clk_add_alias("uart", NULL, "ahb", NULL);
}

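/*
 * Register a fixed-factor clock (rate = parent * mult / div), so that
 * CPU/DDR/AHB stay tied to the "ref" clock instead of being registered
 * as precomputed fixed rates.
 */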
static struct clk * __init ath79_reg_ffclk(const char *name,
		const char *parent_name, unsigned int mult, unsigned int div)
{
	struct clk *clk;

	clk = clk_register_fixed_factor(NULL, name, parent_name, 0, mult, div);
	if (IS_ERR(clk))
		panic("failed to allocate %s clock structure", name);

	return clk;
}

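/*
 * AR724x/AR913x: all three system clocks are fixed factors of the
 * reference clock.  The multiplier and reference divider come from the
 * CPU_CONFIG PLL register; DDR and AHB apply further dividers on top of
 * the CPU ratio.
 */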
static void __init ar724x_clk_init(struct clk *ref_clk, void __iomem *pll_base)
{
	u32 pll;
	u32 mult, div, ddr_div, ahb_div;

	pll = __raw_readl(pll_base + AR724X_PLL_REG_CPU_CONFIG);

	mult = ((pll >> AR724X_PLL_FB_SHIFT) & AR724X_PLL_FB_MASK);
	div = ((pll >> AR724X_PLL_REF_DIV_SHIFT) & AR724X_PLL_REF_DIV_MASK) * 2;

	ddr_div = ((pll >> AR724X_DDR_DIV_SHIFT) & AR724X_DDR_DIV_MASK) + 1;
	ahb_div = (((pll >> AR724X_AHB_DIV_SHIFT) & AR724X_AHB_DIV_MASK) + 1) * 2;

	clks[ATH79_CLK_CPU] = ath79_reg_ffclk("cpu", "ref", mult, div);
	clks[ATH79_CLK_DDR] = ath79_reg_ffclk("ddr", "ref", mult, div * ddr_div);
	clks[ATH79_CLK_AHB] = ath79_reg_ffclk("ahb", "ref", mult, div * ahb_div);
}

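/* Legacy (non-DT) probe path: register "ref" and the clkdev lookups. */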
static void __init ar724x_clocks_init(void)
{
	struct clk *ref_clk;

	ref_clk = ath79_add_sys_clkdev("ref", AR724X_BASE_FREQ);

	ar724x_clk_init(ref_clk, ath79_pll_base);

	clk_register_clkdev(clks[ATH79_CLK_CPU], "cpu", NULL);
	clk_register_clkdev(clks[ATH79_CLK_DDR], "ddr", NULL);
	clk_register_clkdev(clks[ATH79_CLK_AHB], "ahb", NULL);

	clk_add_alias("wdt", NULL, "ahb", NULL);
	clk_add_alias("uart", NULL, "ahb", NULL);
}

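/*
 * AR933x: with the bypass bit set everything runs at the reference rate;
 * otherwise the PLL output is a fixed factor of the reference (NINT
 * multiplier, reference and output dividers), and CPU, DDR and AHB each
 * apply their own divider from the clock control register.
 */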
static void __init ar9330_clk_init(struct clk *ref_clk, void __iomem *pll_base)
{
	u32 clock_ctrl;
	u32 ref_div;
	u32 ninit_mul;
	u32 out_div;

	u32 cpu_div;
	u32 ddr_div;
	u32 ahb_div;

	clock_ctrl = __raw_readl(pll_base + AR933X_PLL_CLOCK_CTRL_REG);
	if (clock_ctrl & AR933X_PLL_CLOCK_CTRL_BYPASS) {
		ref_div = 1;
		ninit_mul = 1;
		out_div = 1;

		cpu_div = 1;
		ddr_div = 1;
		ahb_div = 1;
	} else {
		u32 cpu_config;
		u32 t;

		cpu_config = __raw_readl(pll_base + AR933X_PLL_CPU_CONFIG_REG);

		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_REFDIV_MASK;
		ref_div = t;

		ninit_mul = (cpu_config >> AR933X_PLL_CPU_CONFIG_NINT_SHIFT) &
			    AR933X_PLL_CPU_CONFIG_NINT_MASK;

		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_OUTDIV_MASK;
		if (t == 0)
			t = 1;

		out_div = (1 << t);

		cpu_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_CPU_DIV_SHIFT) &
			   AR933X_PLL_CLOCK_CTRL_CPU_DIV_MASK) + 1;

		ddr_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_DDR_DIV_SHIFT) &
			   AR933X_PLL_CLOCK_CTRL_DDR_DIV_MASK) + 1;

		ahb_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_AHB_DIV_SHIFT) &
			   AR933X_PLL_CLOCK_CTRL_AHB_DIV_MASK) + 1;
	}

	clks[ATH79_CLK_CPU] = ath79_reg_ffclk("cpu", "ref",
					ninit_mul, ref_div * out_div * cpu_div);
	clks[ATH79_CLK_DDR] = ath79_reg_ffclk("ddr", "ref",
					ninit_mul, ref_div * out_div * ddr_div);
	clks[ATH79_CLK_AHB] = ath79_reg_ffclk("ahb", "ref",
					ninit_mul, ref_div * out_div * ahb_div);
}

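/* The AR933x reference clock is strapped to either 40 MHz or 25 MHz. */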
static void __init ar933x_clocks_init(void)
{
	struct clk *ref_clk;
	unsigned long ref_rate;
	u32 t;

	t = ath79_reset_rr(AR933X_RESET_REG_BOOTSTRAP);
	if (t & AR933X_BOOTSTRAP_REF_CLK_40)
		ref_rate = (40 * 1000 * 1000);
	else
		ref_rate = (25 * 1000 * 1000);

	ref_clk = ath79_add_sys_clkdev("ref", ref_rate);

	ar9330_clk_init(ref_clk, ath79_pll_base);

	clk_register_clkdev(clks[ATH79_CLK_CPU], "cpu", NULL);
	clk_register_clkdev(clks[ATH79_CLK_DDR], "ddr", NULL);
	clk_register_clkdev(clks[ATH79_CLK_AHB], "ahb", NULL);

	clk_add_alias("wdt", NULL, "ahb", NULL);
	clk_add_alias("uart", NULL, "ref", NULL);
}

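/*
 * PLL frequency: ref / ref_div * (nint + nfrac / frac), then divided by
 * 2^out_div.  64-bit intermediates avoid overflowing ref * nint.
 */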
static u32 __init ar934x_get_pll_freq(u32 ref, u32 ref_div, u32 nint, u32 nfrac,
				      u32 frac, u32 out_div)
{
	u64 t;
	u32 ret;

	t = ref;
	t *= nint;
	do_div(t, ref_div);
	ret = t;

	t = ref;
	t *= nfrac;
	do_div(t, ref_div * frac);
	ret += t;

	ret /= (1 << out_div);
	return ret;
}

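/*
 * AR934x: the CPU and DDR PLLs may either be the "local" DPLLs in the
 * SRIF block or the ones in the PLL block.  CPU, DDR and AHB then pick
 * one of the two PLLs (or the reference clock when bypassed) and apply
 * a post divider from the CPU_DDR_CLK_CTRL register.
 */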
static void __init ar934x_clocks_init(void)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, nfrac, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;
	void __iomem *dpll_base;

	dpll_base = ioremap(AR934X_SRIF_BASE, AR934X_SRIF_SIZE);

	bootstrap = ath79_reset_rr(AR934X_RESET_REG_BOOTSTRAP);
	if (bootstrap & AR934X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL2_REG);
	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
		pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL1_REG);
		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
		       AR934X_SRIF_DPLL1_NINT_MASK;
		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
			  AR934X_SRIF_DPLL1_REFDIV_MASK;
		frac = 1 << 18;
	} else {
		pll = ath79_pll_rr(AR934X_PLL_CPU_CONFIG_REG);
		out_div = (pll >> AR934X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
			  AR934X_PLL_CPU_CONFIG_OUTDIV_MASK;
		ref_div = (pll >> AR934X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
			  AR934X_PLL_CPU_CONFIG_REFDIV_MASK;
		nint = (pll >> AR934X_PLL_CPU_CONFIG_NINT_SHIFT) &
		       AR934X_PLL_CPU_CONFIG_NINT_MASK;
		nfrac = (pll >> AR934X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
			AR934X_PLL_CPU_CONFIG_NFRAC_MASK;
		frac = 1 << 6;
	}

	cpu_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
				      nfrac, frac, out_div);

	pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL2_REG);
	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
		pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL1_REG);
		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
		       AR934X_SRIF_DPLL1_NINT_MASK;
		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
			  AR934X_SRIF_DPLL1_REFDIV_MASK;
		frac = 1 << 18;
	} else {
		pll = ath79_pll_rr(AR934X_PLL_DDR_CONFIG_REG);
		out_div = (pll >> AR934X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
			  AR934X_PLL_DDR_CONFIG_OUTDIV_MASK;
		ref_div = (pll >> AR934X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
			  AR934X_PLL_DDR_CONFIG_REFDIV_MASK;
		nint = (pll >> AR934X_PLL_DDR_CONFIG_NINT_SHIFT) &
		       AR934X_PLL_DDR_CONFIG_NINT_MASK;
		nfrac = (pll >> AR934X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
			AR934X_PLL_DDR_CONFIG_NFRAC_MASK;
		frac = 1 << 10;
	}

	ddr_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
				      nfrac, frac, out_div);

	clk_ctrl = ath79_pll_rr(AR934X_PLL_CPU_DDR_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = cpu_pll / (postdiv + 1);
	else
		cpu_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = ddr_pll / (postdiv + 1);
	else
		ddr_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_add_sys_clkdev("ref", ref_rate);
	clks[ATH79_CLK_CPU] = ath79_add_sys_clkdev("cpu", cpu_rate);
	clks[ATH79_CLK_DDR] = ath79_add_sys_clkdev("ddr", ddr_rate);
	clks[ATH79_CLK_AHB] = ath79_add_sys_clkdev("ahb", ahb_rate);

	clk_add_alias("wdt", NULL, "ref", NULL);
	clk_add_alias("uart", NULL, "ref", NULL);

	iounmap(dpll_base);
}

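/*
 * QCA955x: same layout as the AR934x PLL block, without the SRIF DPLLs.
 * Note that the CPUCLK/DDRCLK "FROM_*PLL" mux bits are handled the other
 * way round than on AR934x.
 */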
static void __init qca955x_clocks_init(void)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;

	bootstrap = ath79_reset_rr(QCA955X_RESET_REG_BOOTSTRAP);
	if (bootstrap & QCA955X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	pll = ath79_pll_rr(QCA955X_PLL_CPU_CONFIG_REG);
	out_div = (pll >> QCA955X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		  QCA955X_PLL_CPU_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA955X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		  QCA955X_PLL_CPU_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA955X_PLL_CPU_CONFIG_NINT_SHIFT) &
	       QCA955X_PLL_CPU_CONFIG_NINT_MASK;
	frac = (pll >> QCA955X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
	       QCA955X_PLL_CPU_CONFIG_NFRAC_MASK;

	cpu_pll = nint * ref_rate / ref_div;
	cpu_pll += frac * ref_rate / (ref_div * (1 << 6));
	cpu_pll /= (1 << out_div);

	pll = ath79_pll_rr(QCA955X_PLL_DDR_CONFIG_REG);
	out_div = (pll >> QCA955X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
		  QCA955X_PLL_DDR_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA955X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
		  QCA955X_PLL_DDR_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA955X_PLL_DDR_CONFIG_NINT_SHIFT) &
	       QCA955X_PLL_DDR_CONFIG_NINT_MASK;
	frac = (pll >> QCA955X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
	       QCA955X_PLL_DDR_CONFIG_NFRAC_MASK;

	ddr_pll = nint * ref_rate / ref_div;
	ddr_pll += frac * ref_rate / (ref_div * (1 << 10));
	ddr_pll /= (1 << out_div);

	clk_ctrl = ath79_pll_rr(QCA955X_PLL_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = ddr_pll / (postdiv + 1);
	else
		cpu_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = cpu_pll / (postdiv + 1);
	else
		ddr_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_add_sys_clkdev("ref", ref_rate);
	clks[ATH79_CLK_CPU] = ath79_add_sys_clkdev("cpu", cpu_rate);
	clks[ATH79_CLK_DDR] = ath79_add_sys_clkdev("ddr", ddr_rate);
	clks[ATH79_CLK_AHB] = ath79_add_sys_clkdev("ahb", ahb_rate);

	clk_add_alias("wdt", NULL, "ref", NULL);
	clk_add_alias("uart", NULL, "ref", NULL);
}

void __init ath79_clocks_init(void)
{
	if (soc_is_ar71xx())
		ar71xx_clocks_init();
	else if (soc_is_ar724x() || soc_is_ar913x())
		ar724x_clocks_init();
	else if (soc_is_ar933x())
		ar933x_clocks_init();
	else if (soc_is_ar934x())
		ar934x_clocks_init();
	else if (soc_is_qca955x())
		qca955x_clocks_init();
	else
		BUG();
}

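/*
 * Helper for early platform code that needs a rate before regular clk
 * consumers exist; the clock must already have been registered by
 * ath79_clocks_init().
 */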
unsigned long __init
ath79_get_sys_clk_rate(const char *id)
{
	struct clk *clk;
	unsigned long rate;

	clk = clk_get(NULL, id);
	if (IS_ERR(clk))
		panic("unable to get %s clock, err=%d", id, (int) PTR_ERR(clk));

	rate = clk_get_rate(clk);
	clk_put(clk);

	return rate;
}

#ifdef CONFIG_OF
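/*
 * Legacy DT bindings: the clocks are still probed by ath79_clocks_init(),
 * this only exposes the already-filled clks[] array as a DT clock
 * provider.
 */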
static void __init ath79_clocks_init_dt(struct device_node *np)
{
	of_clk_add_provider(np, of_clk_src_onecell_get, &clk_data);
}

CLK_OF_DECLARE(ar7100, "qca,ar7100-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar7240, "qca,ar7240-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9340, "qca,ar9340-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9550, "qca,qca9550-pll", ath79_clocks_init_dt);

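/*
 * Newer bindings describe the PLL block itself as a clock provider with
 * the reference clock as its input; pick the SoC-specific init from the
 * compatible string and export the clocks through a onecell provider.
 */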
static void __init ath79_clocks_init_dt_ng(struct device_node *np)
{
	struct clk *ref_clk;
	void __iomem *pll_base;
	const char *dnfn = of_node_full_name(np);

	ref_clk = of_clk_get(np, 0);
	if (IS_ERR(ref_clk)) {
		pr_err("%s: of_clk_get failed\n", dnfn);
		goto err;
	}

	pll_base = of_iomap(np, 0);
	if (!pll_base) {
		pr_err("%s: can't map pll registers\n", dnfn);
		goto err_clk;
	}

	if (of_device_is_compatible(np, "qca,ar9130-pll"))
		ar724x_clk_init(ref_clk, pll_base);
	else if (of_device_is_compatible(np, "qca,ar9330-pll"))
		ar9330_clk_init(ref_clk, pll_base);
	else {
		pr_err("%s: could not find any appropriate clk_init()\n", dnfn);
		goto err_iounmap;
	}

	if (of_clk_add_provider(np, of_clk_src_onecell_get, &clk_data)) {
		pr_err("%s: could not register clk provider\n", dnfn);
		goto err_iounmap;
	}

	return;

err_iounmap:
	iounmap(pll_base);

err_clk:
	clk_put(ref_clk);

err:
	return;
}
CLK_OF_DECLARE(ar9130_clk, "qca,ar9130-pll", ath79_clocks_init_dt_ng);
CLK_OF_DECLARE(ar9330_clk, "qca,ar9330-pll", ath79_clocks_init_dt_ng);
#endif