// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2014 MediaTek Inc.
 * Author: James Liao <jamesjj.liao@mediatek.com>
 */

#include <linux/clk-provider.h>
#include <linux/container_of.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of_address.h>
#include <linux/slab.h>

#include "clk-pll.h"

#define MHZ			(1000 * 1000)

#define REG_CON0		0
#define REG_CON1		4

#define CON0_BASE_EN		BIT(0)
#define CON0_PWR_ON		BIT(0)
#define CON0_ISO_EN		BIT(1)
#define PCW_CHG_BIT		31

#define AUDPLL_TUNER_EN		BIT(31)

/* default 7 integer bits, can be overridden with pcwibits. */
#define INTEGER_BITS		7

int mtk_pll_is_prepared(struct clk_hw *hw)
{
	struct mtk_clk_pll *pll = to_mtk_clk_pll(hw);

	return (readl(pll->en_addr) & BIT(pll->data->pll_en_bit)) != 0;
}

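/*
 * Some newer PLLs report their enable status through a dedicated FENC
 * status register rather than the enable bit in CON0; check that bit here.
 */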
static int mtk_pll_fenc_is_prepared(struct clk_hw *hw)
{
	struct mtk_clk_pll *pll = to_mtk_clk_pll(hw);

	return !!(readl(pll->fenc_addr) & BIT(pll->data->fenc_sta_bit));
}

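/*
 * rate = (fin * pcw / 2^pcwfbits) / postdiv, where pcwfbits is the number
 * of fractional bits in the PCW field. Both the shift by pcwfbits and the
 * final division by postdiv round up, so the result never undershoots the
 * exact value.
 */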
static unsigned long __mtk_pll_recalc_rate(struct mtk_clk_pll *pll, u32 fin,
					   u32 pcw, int postdiv)
{
	int pcwbits = pll->data->pcwbits;
	int pcwfbits = 0;
	int ibits;
	u64 vco;
	u8 c = 0;

	/* pcw has 'ibits' integer bits; any remaining bits are fractional. */
	ibits = pll->data->pcwibits ? pll->data->pcwibits : INTEGER_BITS;
	if (pcwbits > ibits)
		pcwfbits = pcwbits - ibits;

	vco = (u64)fin * pcw;

	if (pcwfbits && (vco & GENMASK(pcwfbits - 1, 0)))
		c = 1;

	vco >>= pcwfbits;

	if (c)
		vco++;

	return ((unsigned long)vco + postdiv - 1) / postdiv;
}

static void __mtk_pll_tuner_enable(struct mtk_clk_pll *pll)
{
	u32 r;

	if (pll->tuner_en_addr) {
		r = readl(pll->tuner_en_addr) | BIT(pll->data->tuner_en_bit);
		writel(r, pll->tuner_en_addr);
	} else if (pll->tuner_addr) {
		r = readl(pll->tuner_addr) | AUDPLL_TUNER_EN;
		writel(r, pll->tuner_addr);
	}
}

static void __mtk_pll_tuner_disable(struct mtk_clk_pll *pll)
{
	u32 r;

	if (pll->tuner_en_addr) {
		r = readl(pll->tuner_en_addr) & ~BIT(pll->data->tuner_en_bit);
		writel(r, pll->tuner_en_addr);
	} else if (pll->tuner_addr) {
		r = readl(pll->tuner_addr) & ~AUDPLL_TUNER_EN;
		writel(r, pll->tuner_addr);
	}
}

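/*
 * Program a new PCW/postdiv pair: the tuner is turned off around the update,
 * postdiv and PCW are written (in a single write when they share a register),
 * the PCW change is latched via the pcw_chg bit, then the tuner is re-enabled
 * and the PLL is given 20us to settle.
 */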
static void mtk_pll_set_rate_regs(struct mtk_clk_pll *pll, u32 pcw,
				  int postdiv)
{
	u32 chg, val;

	/* disable tuner */
	__mtk_pll_tuner_disable(pll);

	/* set postdiv */
	val = readl(pll->pd_addr);
	val &= ~(POSTDIV_MASK << pll->data->pd_shift);
	val |= (ffs(postdiv) - 1) << pll->data->pd_shift;

	/* postdiv and pcw need to be set at the same time if on the same register */
	if (pll->pd_addr != pll->pcw_addr) {
		writel(val, pll->pd_addr);
		val = readl(pll->pcw_addr);
	}

	/* set pcw */
	val &= ~GENMASK(pll->data->pcw_shift + pll->data->pcwbits - 1,
			pll->data->pcw_shift);
	val |= pcw << pll->data->pcw_shift;
	writel(val, pll->pcw_addr);
	chg = readl(pll->pcw_chg_addr) |
	      BIT(pll->data->pcw_chg_bit ? : PCW_CHG_BIT);
	writel(chg, pll->pcw_chg_addr);
	if (pll->tuner_addr)
		writel(val + 1, pll->tuner_addr);

	/* restore tuner_en */
	__mtk_pll_tuner_enable(pll);

	udelay(20);
}

/*
 * mtk_pll_calc_values - calculate PCW and postdiv for a desired output rate.
 * @pll: The PLL
 * @pcw: The pcw value (output)
 * @postdiv: The post divider (output)
 * @freq: The desired target frequency
 * @fin: The input frequency
 */
void mtk_pll_calc_values(struct mtk_clk_pll *pll, u32 *pcw, u32 *postdiv,
			 u32 freq, u32 fin)
{
	unsigned long fmin = pll->data->fmin ? pll->data->fmin : (1000 * MHZ);
	const struct mtk_pll_div_table *div_table = pll->data->div_table;
	u64 _pcw;
	int ibits;
	u32 val;

	if (freq > pll->data->fmax)
		freq = pll->data->fmax;

	if (div_table) {
		if (freq > div_table[0].freq)
			freq = div_table[0].freq;

		for (val = 0; div_table[val + 1].freq != 0; val++) {
			if (freq > div_table[val + 1].freq)
				break;
		}
		*postdiv = 1 << val;
	} else {
		for (val = 0; val < 5; val++) {
			*postdiv = 1 << val;
			if ((u64)freq * *postdiv >= fmin)
				break;
		}
	}

	/* _pcw = freq * postdiv / fin * 2^pcwfbits */
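	/*
	 * Illustrative example (assumed field widths, they vary per PLL):
	 * with pcwbits = 22, pcwibits = 8 (14 fractional bits), fin = 26 MHz,
	 * freq = 500 MHz and postdiv = 2:
	 *   _pcw = 500000000 * 2 * 2^14 / 26000000 ~= 630153 (0x99d89)
	 */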
	ibits = pll->data->pcwibits ? pll->data->pcwibits : INTEGER_BITS;
	_pcw = ((u64)freq << val) << (pll->data->pcwbits - ibits);
	do_div(_pcw, fin);

	*pcw = (u32)_pcw;
}

int mtk_pll_set_rate(struct clk_hw *hw, unsigned long rate,
		     unsigned long parent_rate)
{
	struct mtk_clk_pll *pll = to_mtk_clk_pll(hw);
	u32 pcw = 0;
	u32 postdiv;

	mtk_pll_calc_values(pll, &pcw, &postdiv, rate, parent_rate);
	mtk_pll_set_rate_regs(pll, pcw, postdiv);

	return 0;
}

unsigned long mtk_pll_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct mtk_clk_pll *pll = to_mtk_clk_pll(hw);
	u32 postdiv;
	u32 pcw;

	postdiv = (readl(pll->pd_addr) >> pll->data->pd_shift) & POSTDIV_MASK;
	postdiv = 1 << postdiv;

	pcw = readl(pll->pcw_addr) >> pll->data->pcw_shift;
	pcw &= GENMASK(pll->data->pcwbits - 1, 0);

	return __mtk_pll_recalc_rate(pll, parent_rate, pcw, postdiv);
}

int mtk_pll_determine_rate(struct clk_hw *hw, struct clk_rate_request *req)
{
	struct mtk_clk_pll *pll = to_mtk_clk_pll(hw);
	u32 pcw = 0;
	u32 postdiv;

	mtk_pll_calc_values(pll, &pcw, &postdiv, req->rate,
			    req->best_parent_rate);

	req->rate = __mtk_pll_recalc_rate(pll, req->best_parent_rate, pcw,
					  postdiv);

	return 0;
}

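/*
 * Power-up sequence: switch on the PLL power supply, release the output
 * isolation, set the enable bit(s), re-enable the tuner if present, wait
 * 20us for the PLL to stabilize, then release the reset bar for PLLs that
 * require it.
 */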
int mtk_pll_prepare(struct clk_hw *hw)
{
	struct mtk_clk_pll *pll = to_mtk_clk_pll(hw);
	u32 r;

	r = readl(pll->pwr_addr) | CON0_PWR_ON;
	writel(r, pll->pwr_addr);
	udelay(1);

	r = readl(pll->pwr_addr) & ~CON0_ISO_EN;
	writel(r, pll->pwr_addr);
	udelay(1);

	r = readl(pll->en_addr) | BIT(pll->data->pll_en_bit);
	writel(r, pll->en_addr);

	if (pll->data->en_mask) {
		r = readl(pll->base_addr + REG_CON0) | pll->data->en_mask;
		writel(r, pll->base_addr + REG_CON0);
	}

	__mtk_pll_tuner_enable(pll);

	udelay(20);

	if (pll->data->flags & HAVE_RST_BAR) {
		r = readl(pll->base_addr + REG_CON0);
		r |= pll->data->rst_bar_mask;
		writel(r, pll->base_addr + REG_CON0);
	}

	return 0;
}

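/*
 * Power-down sequence, the reverse of mtk_pll_prepare(): put the PLL back
 * into reset where applicable, stop the tuner, clear the enable bit(s),
 * assert output isolation and finally switch the PLL power off.
 */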
void mtk_pll_unprepare(struct clk_hw *hw)
{
	struct mtk_clk_pll *pll = to_mtk_clk_pll(hw);
	u32 r;

	if (pll->data->flags & HAVE_RST_BAR) {
		r = readl(pll->base_addr + REG_CON0);
		r &= ~pll->data->rst_bar_mask;
		writel(r, pll->base_addr + REG_CON0);
	}

	__mtk_pll_tuner_disable(pll);

	if (pll->data->en_mask) {
		r = readl(pll->base_addr + REG_CON0) & ~pll->data->en_mask;
		writel(r, pll->base_addr + REG_CON0);
	}

	r = readl(pll->en_addr) & ~BIT(pll->data->pll_en_bit);
	writel(r, pll->en_addr);

	r = readl(pll->pwr_addr) | CON0_ISO_EN;
	writel(r, pll->pwr_addr);

	r = readl(pll->pwr_addr) & ~CON0_PWR_ON;
	writel(r, pll->pwr_addr);
}

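/*
 * Variant for PLLs whose enable bit is driven through dedicated set/clear
 * registers: a single write to the set (or clear) register flips the enable
 * bit without a read-modify-write cycle.
 */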
static int mtk_pll_prepare_setclr(struct clk_hw *hw)
{
	struct mtk_clk_pll *pll = to_mtk_clk_pll(hw);

	writel(BIT(pll->data->pll_en_bit), pll->en_set_addr);

	/* Wait 20us after enable for the PLL to stabilize */
	udelay(20);

	return 0;
}

static void mtk_pll_unprepare_setclr(struct clk_hw *hw)
{
	struct mtk_clk_pll *pll = to_mtk_clk_pll(hw);

	writel(BIT(pll->data->pll_en_bit), pll->en_clr_addr);
}

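/* Default ops for PLLs controlled through the CON0/PWR registers. */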
const struct clk_ops mtk_pll_ops = {
	.is_prepared = mtk_pll_is_prepared,
	.prepare = mtk_pll_prepare,
	.unprepare = mtk_pll_unprepare,
	.recalc_rate = mtk_pll_recalc_rate,
	.determine_rate = mtk_pll_determine_rate,
	.set_rate = mtk_pll_set_rate,
};

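/*
 * Ops for PLLs that are enabled via set/clear registers and report their
 * status through the FENC status register.
 */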
const struct clk_ops mtk_pll_fenc_clr_set_ops = {
	.is_prepared = mtk_pll_fenc_is_prepared,
	.prepare = mtk_pll_prepare_setclr,
	.unprepare = mtk_pll_unprepare_setclr,
	.recalc_rate = mtk_pll_recalc_rate,
	.determine_rate = mtk_pll_determine_rate,
	.set_rate = mtk_pll_set_rate,
};
EXPORT_SYMBOL_GPL(mtk_pll_fenc_clr_set_ops);

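/*
 * Compute the per-PLL register addresses from the PLL data, fill in the
 * clk_init_data (falling back to CON1 for the PCW change register, CON0 for
 * the enable register and "clk26m" for the parent) and register the clock
 * with the given ops.
 */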
struct clk_hw *mtk_clk_register_pll_ops(struct mtk_clk_pll *pll,
					const struct mtk_pll_data *data,
					void __iomem *base,
					const struct clk_ops *pll_ops)
{
	struct clk_init_data init = {};
	int ret;
	const char *parent_name = "clk26m";

	pll->base_addr = base + data->reg;
	pll->pwr_addr = base + data->pwr_reg;
	pll->pd_addr = base + data->pd_reg;
	pll->pcw_addr = base + data->pcw_reg;
	if (data->pcw_chg_reg)
		pll->pcw_chg_addr = base + data->pcw_chg_reg;
	else
		pll->pcw_chg_addr = pll->base_addr + REG_CON1;
	if (data->tuner_reg)
		pll->tuner_addr = base + data->tuner_reg;
	if (data->tuner_en_reg || data->tuner_en_bit)
		pll->tuner_en_addr = base + data->tuner_en_reg;
	if (data->en_reg)
		pll->en_addr = base + data->en_reg;
	else
		pll->en_addr = pll->base_addr + REG_CON0;
	if (data->en_set_reg)
		pll->en_set_addr = base + data->en_set_reg;
	if (data->en_clr_reg)
		pll->en_clr_addr = base + data->en_clr_reg;
	pll->hw.init = &init;
	pll->data = data;

	pll->fenc_addr = base + data->fenc_sta_ofs;

	init.name = data->name;
	init.flags = (data->flags & PLL_AO) ? CLK_IS_CRITICAL : 0;
	init.ops = pll_ops;
	if (data->parent_name)
		init.parent_names = &data->parent_name;
	else
		init.parent_names = &parent_name;
	init.num_parents = 1;

	ret = clk_hw_register(NULL, &pll->hw);

	if (ret)
		return ERR_PTR(ret);

	return &pll->hw;
}

struct clk_hw *mtk_clk_register_pll(const struct mtk_pll_data *data,
				    void __iomem *base)
{
	struct mtk_clk_pll *pll;
	struct clk_hw *hw;
	const struct clk_ops *pll_ops = data->ops ? data->ops : &mtk_pll_ops;

	pll = kzalloc(sizeof(*pll), GFP_KERNEL);
	if (!pll)
		return ERR_PTR(-ENOMEM);

	hw = mtk_clk_register_pll_ops(pll, data, base, pll_ops);
	if (IS_ERR(hw))
		kfree(pll);

	return hw;
}

void mtk_clk_unregister_pll(struct clk_hw *hw)
{
	struct mtk_clk_pll *pll;

	if (!hw)
		return;

	pll = to_mtk_clk_pll(hw);

	clk_hw_unregister(hw);
	kfree(pll);
}

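/*
 * Map the I/O region described by the device tree node and register every
 * PLL in the array into clk_data. On failure, the PLLs registered so far
 * are unregistered again and the mapping is released.
 */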
int mtk_clk_register_plls(struct device_node *node,
			  const struct mtk_pll_data *plls, int num_plls,
			  struct clk_hw_onecell_data *clk_data)
{
	void __iomem *base;
	int i;
	struct clk_hw *hw;

	base = of_iomap(node, 0);
	if (!base) {
		pr_err("%s(): ioremap failed\n", __func__);
		return -EINVAL;
	}

	for (i = 0; i < num_plls; i++) {
		const struct mtk_pll_data *pll = &plls[i];

		if (!IS_ERR_OR_NULL(clk_data->hws[pll->id])) {
			pr_warn("%pOF: Trying to register duplicate clock ID: %d\n",
				node, pll->id);
			continue;
		}

		hw = mtk_clk_register_pll(pll, base);

		if (IS_ERR(hw)) {
			pr_err("Failed to register clk %s: %pe\n", pll->name,
			       hw);
			goto err;
		}

		clk_data->hws[pll->id] = hw;
	}

	return 0;

err:
	while (--i >= 0) {
		const struct mtk_pll_data *pll = &plls[i];

		mtk_clk_unregister_pll(clk_data->hws[pll->id]);
		clk_data->hws[pll->id] = ERR_PTR(-ENOENT);
	}

	iounmap(base);

	return PTR_ERR(hw);
}
EXPORT_SYMBOL_GPL(mtk_clk_register_plls);

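/*
 * Recover the I/O region base address from a registered PLL by subtracting
 * its CON0 offset from the mapped base_addr.
 */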
__iomem void *mtk_clk_pll_get_base(struct clk_hw *hw,
				   const struct mtk_pll_data *data)
{
	struct mtk_clk_pll *pll = to_mtk_clk_pll(hw);

	return pll->base_addr - data->reg;
}

void mtk_clk_unregister_plls(const struct mtk_pll_data *plls, int num_plls,
			     struct clk_hw_onecell_data *clk_data)
{
	__iomem void *base = NULL;
	int i;

	if (!clk_data)
		return;

	for (i = num_plls; i > 0; i--) {
		const struct mtk_pll_data *pll = &plls[i - 1];

		if (IS_ERR_OR_NULL(clk_data->hws[pll->id]))
			continue;

		/*
		 * This is quite ugly but unfortunately the clks don't have
		 * any device tied to them, so there's no place to store the
		 * pointer to the I/O region base address. We have to fetch
		 * it from one of the registered clks.
		 */
		base = mtk_clk_pll_get_base(clk_data->hws[pll->id], pll);

		mtk_clk_unregister_pll(clk_data->hws[pll->id]);
		clk_data->hws[pll->id] = ERR_PTR(-ENOENT);
	}

	iounmap(base);
}
EXPORT_SYMBOL_GPL(mtk_clk_unregister_plls);

MODULE_LICENSE("GPL");