/*
 *  Atheros AR71XX/AR724X/AR913X common routines
 *
 *  Copyright (C) 2010-2011 Jaiganesh Narayanan <jnarayanan@atheros.com>
 *  Copyright (C) 2011 Gabor Juhos <juhosg@openwrt.org>
 *
 *  Parts of this file are based on Atheros' 2.6.15/2.6.31 BSP
 *
 *  This program is free software; you can redistribute it and/or modify it
 *  under the terms of the GNU General Public License version 2 as published
 *  by the Free Software Foundation.
 */

#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/io.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/clkdev.h>
#include <linux/clk-provider.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <dt-bindings/clock/ath79-clk.h>

#include <asm/div64.h>

#include <asm/mach-ath79/ath79.h>
#include <asm/mach-ath79/ar71xx_regs.h>
#include "common.h"

#define AR71XX_BASE_FREQ	40000000
#define AR724X_BASE_FREQ	40000000

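/*
 * Clocks are collected in a fixed-size array indexed by the ATH79_CLK_*
 * identifiers from the DT binding and exposed through a onecell clock
 * provider; each clock is additionally registered as a clkdev lookup
 * under the names listed in clk_names[].
 */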
static struct clk *clks[ATH79_CLK_END];
static struct clk_onecell_data clk_data = {
	.clks = clks,
	.clk_num = ARRAY_SIZE(clks),
};

static const char * const clk_names[ATH79_CLK_END] = {
	[ATH79_CLK_CPU] = "cpu",
	[ATH79_CLK_DDR] = "ddr",
	[ATH79_CLK_AHB] = "ahb",
	[ATH79_CLK_REF] = "ref",
	[ATH79_CLK_MDIO] = "mdio",
};

static const char * __init ath79_clk_name(int type)
{
	BUG_ON(type >= ARRAY_SIZE(clk_names) || !clk_names[type]);
	return clk_names[type];
}

static void __init __ath79_set_clk(int type, const char *name, struct clk *clk)
{
	if (IS_ERR(clk))
		panic("failed to allocate %s clock structure", clk_names[type]);

	clks[type] = clk;
	clk_register_clkdev(clk, name, NULL);
}

static struct clk * __init ath79_set_clk(int type, unsigned long rate)
{
	const char *name = ath79_clk_name(type);
	struct clk *clk;

	clk = clk_register_fixed_rate(NULL, name, NULL, 0, rate);
	__ath79_set_clk(type, name, clk);
	return clk;
}

static struct clk * __init ath79_set_ff_clk(int type, const char *parent,
					    unsigned int mult, unsigned int div)
{
	const char *name = ath79_clk_name(type);
	struct clk *clk;

	clk = clk_register_fixed_factor(NULL, name, parent, 0, mult, div);
	__ath79_set_clk(type, name, clk);
	return clk;
}

static unsigned long __init ath79_setup_ref_clk(unsigned long rate)
{
	struct clk *clk = clks[ATH79_CLK_REF];

	if (clk)
		rate = clk_get_rate(clk);
	else
		clk = ath79_set_clk(ATH79_CLK_REF, rate);

	return rate;
}

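/*
 * AR71xx: the CPU PLL runs at ref * (FB + 1). The CPU and DDR clocks are
 * the PLL output divided by their respective dividers, and the AHB clock
 * is the CPU clock divided by 2 * (AHB_DIV + 1).
 */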
static void __init ar71xx_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll;
	u32 freq;
	u32 div;

	ref_rate = ath79_setup_ref_clk(AR71XX_BASE_FREQ);

	pll = __raw_readl(pll_base + AR71XX_PLL_REG_CPU_CONFIG);

	div = ((pll >> AR71XX_PLL_FB_SHIFT) & AR71XX_PLL_FB_MASK) + 1;
	freq = div * ref_rate;

	div = ((pll >> AR71XX_CPU_DIV_SHIFT) & AR71XX_CPU_DIV_MASK) + 1;
	cpu_rate = freq / div;

	div = ((pll >> AR71XX_DDR_DIV_SHIFT) & AR71XX_DDR_DIV_MASK) + 1;
	ddr_rate = freq / div;

	div = (((pll >> AR71XX_AHB_DIV_SHIFT) & AR71XX_AHB_DIV_MASK) + 1) * 2;
	ahb_rate = cpu_rate / div;

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

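/*
 * AR724x/AR913x: CPU, DDR and AHB are registered as fixed-factor clocks
 * of "ref" so their rates track the reference clock. The multiplier is
 * the PLL feedback value, the common divider is 2 * REF_DIV, and the DDR
 * and AHB clocks apply an extra divider on top of the CPU ratio.
 */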
static void __init ar724x_clocks_init(void __iomem *pll_base)
{
	u32 mult, div, ddr_div, ahb_div;
	u32 pll;

	ath79_setup_ref_clk(AR71XX_BASE_FREQ);

	pll = __raw_readl(pll_base + AR724X_PLL_REG_CPU_CONFIG);

	mult = ((pll >> AR724X_PLL_FB_SHIFT) & AR724X_PLL_FB_MASK);
	div = ((pll >> AR724X_PLL_REF_DIV_SHIFT) & AR724X_PLL_REF_DIV_MASK) * 2;

	ddr_div = ((pll >> AR724X_DDR_DIV_SHIFT) & AR724X_DDR_DIV_MASK) + 1;
	ahb_div = (((pll >> AR724X_AHB_DIV_SHIFT) & AR724X_AHB_DIV_MASK) + 1) * 2;

	ath79_set_ff_clk(ATH79_CLK_CPU, "ref", mult, div);
	ath79_set_ff_clk(ATH79_CLK_DDR, "ref", mult, div * ddr_div);
	ath79_set_ff_clk(ATH79_CLK_AHB, "ref", mult, div * ahb_div);
}

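/*
 * AR933x: the reference clock is 40 or 25 MHz depending on the bootstrap
 * strap. With the PLL bypassed all clocks run at the reference rate;
 * otherwise CPU, DDR and AHB are fixed-factor clocks of "ref" with
 * multiplier NINT and divider REFDIV * 2^OUTDIV (an OUTDIV of 0 is
 * treated as 1) * per-clock divider.
 */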
static void __init ar933x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	u32 clock_ctrl;
	u32 ref_div;
	u32 ninit_mul;
	u32 out_div;

	u32 cpu_div;
	u32 ddr_div;
	u32 ahb_div;
	u32 t;

	t = ath79_reset_rr(AR933X_RESET_REG_BOOTSTRAP);
	if (t & AR933X_BOOTSTRAP_REF_CLK_40)
		ref_rate = (40 * 1000 * 1000);
	else
		ref_rate = (25 * 1000 * 1000);

	ath79_setup_ref_clk(ref_rate);

	clock_ctrl = __raw_readl(pll_base + AR933X_PLL_CLOCK_CTRL_REG);
	if (clock_ctrl & AR933X_PLL_CLOCK_CTRL_BYPASS) {
		ref_div = 1;
		ninit_mul = 1;
		out_div = 1;

		cpu_div = 1;
		ddr_div = 1;
		ahb_div = 1;
	} else {
		u32 cpu_config;
		u32 t;

		cpu_config = __raw_readl(pll_base + AR933X_PLL_CPU_CONFIG_REG);

		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_REFDIV_MASK;
		ref_div = t;

		ninit_mul = (cpu_config >> AR933X_PLL_CPU_CONFIG_NINT_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_NINT_MASK;

		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_OUTDIV_MASK;
		if (t == 0)
			t = 1;

		out_div = (1 << t);

		cpu_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_CPU_DIV_SHIFT) &
		     AR933X_PLL_CLOCK_CTRL_CPU_DIV_MASK) + 1;

		ddr_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_DDR_DIV_SHIFT) &
		      AR933X_PLL_CLOCK_CTRL_DDR_DIV_MASK) + 1;

		ahb_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_AHB_DIV_SHIFT) &
		     AR933X_PLL_CLOCK_CTRL_AHB_DIV_MASK) + 1;
	}

	ath79_set_ff_clk(ATH79_CLK_CPU, "ref", ninit_mul,
			 ref_div * out_div * cpu_div);
	ath79_set_ff_clk(ATH79_CLK_DDR, "ref", ninit_mul,
			 ref_div * out_div * ddr_div);
	ath79_set_ff_clk(ATH79_CLK_AHB, "ref", ninit_mul,
			 ref_div * out_div * ahb_div);
}

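/*
 * Compute a PLL rate as ref * nint / ref_div plus the fractional part
 * ref * nfrac / (ref_div * frac), then divide by 2^out_div. The
 * intermediate products are done in 64 bits to avoid overflow.
 */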
static u32 __init ar934x_get_pll_freq(u32 ref, u32 ref_div, u32 nint, u32 nfrac,
				      u32 frac, u32 out_div)
{
	u64 t;
	u32 ret;

	t = ref;
	t *= nint;
	do_div(t, ref_div);
	ret = t;

	t = ref;
	t *= nfrac;
	do_div(t, ref_div * frac);
	ret += t;

	ret /= (1 << out_div);
	return ret;
}

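/*
 * AR934x: the CPU and DDR PLLs may be controlled either through the
 * regular PLL registers or through the SRIF DPLL registers; the DPLL2
 * "local PLL" bit selects which set is in effect. CPU, DDR and AHB then
 * pick one of the two PLLs (or the bypassed reference clock) according
 * to the CPU/DDR clock control register, and a fixed 100 MHz MDIO clock
 * is registered when the switch clock control register selects it.
 */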
static void __init ar934x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, nfrac, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;
	void __iomem *dpll_base;

	dpll_base = ioremap(AR934X_SRIF_BASE, AR934X_SRIF_SIZE);

	bootstrap = ath79_reset_rr(AR934X_RESET_REG_BOOTSTRAP);
	if (bootstrap & AR934X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

	pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL2_REG);
	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
		pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL1_REG);
		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
		       AR934X_SRIF_DPLL1_NINT_MASK;
		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
			  AR934X_SRIF_DPLL1_REFDIV_MASK;
		frac = 1 << 18;
	} else {
		pll = __raw_readl(pll_base + AR934X_PLL_CPU_CONFIG_REG);
		out_div = (pll >> AR934X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
			AR934X_PLL_CPU_CONFIG_OUTDIV_MASK;
		ref_div = (pll >> AR934X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
			  AR934X_PLL_CPU_CONFIG_REFDIV_MASK;
		nint = (pll >> AR934X_PLL_CPU_CONFIG_NINT_SHIFT) &
		       AR934X_PLL_CPU_CONFIG_NINT_MASK;
		nfrac = (pll >> AR934X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
			AR934X_PLL_CPU_CONFIG_NFRAC_MASK;
		frac = 1 << 6;
	}

	cpu_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
				      nfrac, frac, out_div);

	pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL2_REG);
	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
		pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL1_REG);
		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
		       AR934X_SRIF_DPLL1_NINT_MASK;
		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
			  AR934X_SRIF_DPLL1_REFDIV_MASK;
		frac = 1 << 18;
	} else {
		pll = __raw_readl(pll_base + AR934X_PLL_DDR_CONFIG_REG);
		out_div = (pll >> AR934X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
			  AR934X_PLL_DDR_CONFIG_OUTDIV_MASK;
		ref_div = (pll >> AR934X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
			   AR934X_PLL_DDR_CONFIG_REFDIV_MASK;
		nint = (pll >> AR934X_PLL_DDR_CONFIG_NINT_SHIFT) &
		       AR934X_PLL_DDR_CONFIG_NINT_MASK;
		nfrac = (pll >> AR934X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
			AR934X_PLL_DDR_CONFIG_NFRAC_MASK;
		frac = 1 << 10;
	}

	ddr_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
				      nfrac, frac, out_div);

	clk_ctrl = __raw_readl(pll_base + AR934X_PLL_CPU_DDR_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = cpu_pll / (postdiv + 1);
	else
		cpu_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = ddr_pll / (postdiv + 1);
	else
		ddr_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);

	clk_ctrl = __raw_readl(pll_base + AR934X_PLL_SWITCH_CLOCK_CONTROL_REG);
	if (clk_ctrl & AR934X_PLL_SWITCH_CLOCK_CONTROL_MDIO_CLK_SEL)
		ath79_set_clk(ATH79_CLK_MDIO, 100 * 1000 * 1000);

	iounmap(dpll_base);
}

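/*
 * QCA953x: the CPU and DDR PLL rates are computed directly from the PLL
 * config registers (integer plus fractional contribution, divided by the
 * power-of-two output divider); CPU, DDR and AHB then select between the
 * two PLLs or the bypassed reference clock via the clock control
 * register.
 */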
static void __init qca953x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;

	bootstrap = ath79_reset_rr(QCA953X_RESET_REG_BOOTSTRAP);
	if (bootstrap & QCA953X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

	pll = __raw_readl(pll_base + QCA953X_PLL_CPU_CONFIG_REG);
	out_div = (pll >> QCA953X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		  QCA953X_PLL_CPU_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA953X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		  QCA953X_PLL_CPU_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA953X_PLL_CPU_CONFIG_NINT_SHIFT) &
	       QCA953X_PLL_CPU_CONFIG_NINT_MASK;
	frac = (pll >> QCA953X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
	       QCA953X_PLL_CPU_CONFIG_NFRAC_MASK;

	cpu_pll = nint * ref_rate / ref_div;
	cpu_pll += frac * (ref_rate >> 6) / ref_div;
	cpu_pll /= (1 << out_div);

	pll = __raw_readl(pll_base + QCA953X_PLL_DDR_CONFIG_REG);
	out_div = (pll >> QCA953X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
		  QCA953X_PLL_DDR_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA953X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
		  QCA953X_PLL_DDR_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA953X_PLL_DDR_CONFIG_NINT_SHIFT) &
	       QCA953X_PLL_DDR_CONFIG_NINT_MASK;
	frac = (pll >> QCA953X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
	       QCA953X_PLL_DDR_CONFIG_NFRAC_MASK;

	ddr_pll = nint * ref_rate / ref_div;
	ddr_pll += frac * (ref_rate >> 6) / (ref_div << 4);
	ddr_pll /= (1 << out_div);

	clk_ctrl = __raw_readl(pll_base + QCA953X_PLL_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  QCA953X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = cpu_pll / (postdiv + 1);
	else
		cpu_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  QCA953X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = ddr_pll / (postdiv + 1);
	else
		ddr_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  QCA953X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

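/*
 * QCA955x: the CPU and DDR PLL rates are computed from the PLL config
 * registers in the same way as on the QCA953x; CPU, DDR and AHB again
 * select their source PLL or the bypassed reference clock via the clock
 * control register.
 */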
static void __init qca955x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;

	bootstrap = ath79_reset_rr(QCA955X_RESET_REG_BOOTSTRAP);
	if (bootstrap & QCA955X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

	pll = __raw_readl(pll_base + QCA955X_PLL_CPU_CONFIG_REG);
	out_div = (pll >> QCA955X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		  QCA955X_PLL_CPU_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA955X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		  QCA955X_PLL_CPU_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA955X_PLL_CPU_CONFIG_NINT_SHIFT) &
	       QCA955X_PLL_CPU_CONFIG_NINT_MASK;
	frac = (pll >> QCA955X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
	       QCA955X_PLL_CPU_CONFIG_NFRAC_MASK;

	cpu_pll = nint * ref_rate / ref_div;
	cpu_pll += frac * ref_rate / (ref_div * (1 << 6));
	cpu_pll /= (1 << out_div);

	pll = __raw_readl(pll_base + QCA955X_PLL_DDR_CONFIG_REG);
	out_div = (pll >> QCA955X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
		  QCA955X_PLL_DDR_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA955X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
		  QCA955X_PLL_DDR_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA955X_PLL_DDR_CONFIG_NINT_SHIFT) &
	       QCA955X_PLL_DDR_CONFIG_NINT_MASK;
	frac = (pll >> QCA955X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
	       QCA955X_PLL_DDR_CONFIG_NFRAC_MASK;

	ddr_pll = nint * ref_rate / ref_div;
	ddr_pll += frac * ref_rate / (ref_div * (1 << 10));
	ddr_pll /= (1 << out_div);

	clk_ctrl = __raw_readl(pll_base + QCA955X_PLL_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = ddr_pll / (postdiv + 1);
	else
		cpu_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = cpu_pll / (postdiv + 1);
	else
		ddr_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

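/*
 * QCA956x: NINT and the fractional value (split into high and low
 * fields) live in a separate CONFIG1 register for each PLL; clock
 * selection otherwise follows the same pattern as the other QCA95xx
 * parts.
 */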
static void __init qca956x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, hfrac, lfrac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;

	/*
	 * The QCA956x timer init workaround has to be applied right before
	 * setting up the clocks, otherwise there will be no jiffies.
	 */
	u32 misc;

	misc = ath79_reset_rr(AR71XX_RESET_REG_MISC_INT_ENABLE);
	misc |= MISC_INT_MIPS_SI_TIMERINT_MASK;
	ath79_reset_wr(AR71XX_RESET_REG_MISC_INT_ENABLE, misc);

	bootstrap = ath79_reset_rr(QCA956X_RESET_REG_BOOTSTRAP);
	if (bootstrap & QCA956X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

	pll = __raw_readl(pll_base + QCA956X_PLL_CPU_CONFIG_REG);
	out_div = (pll >> QCA956X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		  QCA956X_PLL_CPU_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA956X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		  QCA956X_PLL_CPU_CONFIG_REFDIV_MASK;

	pll = __raw_readl(pll_base + QCA956X_PLL_CPU_CONFIG1_REG);
	nint = (pll >> QCA956X_PLL_CPU_CONFIG1_NINT_SHIFT) &
	       QCA956X_PLL_CPU_CONFIG1_NINT_MASK;
	hfrac = (pll >> QCA956X_PLL_CPU_CONFIG1_NFRAC_H_SHIFT) &
	       QCA956X_PLL_CPU_CONFIG1_NFRAC_H_MASK;
	lfrac = (pll >> QCA956X_PLL_CPU_CONFIG1_NFRAC_L_SHIFT) &
	       QCA956X_PLL_CPU_CONFIG1_NFRAC_L_MASK;

	cpu_pll = nint * ref_rate / ref_div;
	cpu_pll += (lfrac * ref_rate) / ((ref_div * 25) << 13);
	cpu_pll += (hfrac >> 13) * ref_rate / ref_div;
	cpu_pll /= (1 << out_div);

	pll = __raw_readl(pll_base + QCA956X_PLL_DDR_CONFIG_REG);
	out_div = (pll >> QCA956X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
		  QCA956X_PLL_DDR_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA956X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
		  QCA956X_PLL_DDR_CONFIG_REFDIV_MASK;
	pll = __raw_readl(pll_base + QCA956X_PLL_DDR_CONFIG1_REG);
	nint = (pll >> QCA956X_PLL_DDR_CONFIG1_NINT_SHIFT) &
	       QCA956X_PLL_DDR_CONFIG1_NINT_MASK;
	hfrac = (pll >> QCA956X_PLL_DDR_CONFIG1_NFRAC_H_SHIFT) &
	       QCA956X_PLL_DDR_CONFIG1_NFRAC_H_MASK;
	lfrac = (pll >> QCA956X_PLL_DDR_CONFIG1_NFRAC_L_SHIFT) &
	       QCA956X_PLL_DDR_CONFIG1_NFRAC_L_MASK;

	ddr_pll = nint * ref_rate / ref_div;
	ddr_pll += (lfrac * ref_rate) / ((ref_div * 25) << 13);
	ddr_pll += (hfrac >> 13) * ref_rate / ref_div;
	ddr_pll /= (1 << out_div);

	clk_ctrl = __raw_readl(pll_base + QCA956X_PLL_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  QCA956X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_DDRCLK_FROM_CPUPLL)
		cpu_rate = ddr_pll / (postdiv + 1);
	else
		cpu_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  QCA956X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_DDRCLK_FROM_DDRPLL)
		ddr_rate = cpu_pll / (postdiv + 1);
	else
		ddr_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  QCA956X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

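/*
 * Common DT entry point: an optional external reference clock is taken
 * from the node's "clocks" property, the PLL registers are mapped, the
 * SoC-specific routine selected by the compatible string fills in the
 * clock array, and the result is exported through a onecell provider.
 * The MDIO clock falls back to the reference clock when the SoC code
 * did not set it.
 */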
static void __init ath79_clocks_init_dt(struct device_node *np)
{
	struct clk *ref_clk;
	void __iomem *pll_base;

	ref_clk = of_clk_get(np, 0);
	if (!IS_ERR(ref_clk))
		clks[ATH79_CLK_REF] = ref_clk;

	pll_base = of_iomap(np, 0);
	if (!pll_base) {
		pr_err("%pOF: can't map pll registers\n", np);
		goto err_clk;
	}

	if (of_device_is_compatible(np, "qca,ar7100-pll"))
		ar71xx_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,ar7240-pll") ||
		 of_device_is_compatible(np, "qca,ar9130-pll"))
		ar724x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,ar9330-pll"))
		ar933x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,ar9340-pll"))
		ar934x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,qca9530-pll"))
		qca953x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,qca9550-pll"))
		qca955x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,qca9560-pll"))
		qca956x_clocks_init(pll_base);

	if (!clks[ATH79_CLK_MDIO])
		clks[ATH79_CLK_MDIO] = clks[ATH79_CLK_REF];

	if (of_clk_add_provider(np, of_clk_src_onecell_get, &clk_data)) {
		pr_err("%pOF: could not register clk provider\n", np);
		goto err_iounmap;
	}

	return;

err_iounmap:
	iounmap(pll_base);

err_clk:
	clk_put(ref_clk);
}

CLK_OF_DECLARE(ar7100_clk, "qca,ar7100-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar7240_clk, "qca,ar7240-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9130_clk, "qca,ar9130-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9330_clk, "qca,ar9330-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9340_clk, "qca,ar9340-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9530_clk, "qca,qca9530-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9550_clk, "qca,qca9550-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9560_clk, "qca,qca9560-pll", ath79_clocks_init_dt);