// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
 */

#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/regmap.h>

#include <asm/div64.h>

#include "clk-rcg.h"
#include "common.h"

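/*
 * Helpers for the source select field of an NS register: ns_to_src()
 * extracts the currently programmed source value, src_to_ns() returns the
 * NS register contents with that field replaced by @src.
 */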
static u32 ns_to_src(struct src_sel *s, u32 ns)
{
	ns >>= s->src_sel_shift;
	ns &= SRC_SEL_MASK;
	return ns;
}

static u32 src_to_ns(struct src_sel *s, u8 src, u32 ns)
{
	u32 mask;

	mask = SRC_SEL_MASK;
	mask <<= s->src_sel_shift;
	ns &= ~mask;

	ns |= src << s->src_sel_shift;
	return ns;
}

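/*
 * Map the source value read back from the NS register to a parent index via
 * the parent_map table; fall back to index 0 (with a debug message) if the
 * register value does not match any entry.
 */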
static u8 clk_rcg_get_parent(struct clk_hw *hw)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 ns;
	int i, ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	if (ret)
		goto err;
	ns = ns_to_src(&rcg->s, ns);
	for (i = 0; i < num_parents; i++)
		if (ns == rcg->s.parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}

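/*
 * A dynamic RCG has two register banks; reg_to_bank() returns 0 or 1
 * depending on the state of the mux select bit in the bank register.
 */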
static int reg_to_bank(struct clk_dyn_rcg *rcg, u32 bank)
{
	bank &= BIT(rcg->mux_sel_bit);
	return !!bank;
}

static u8 clk_dyn_rcg_get_parent(struct clk_hw *hw)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	int num_parents = clk_hw_get_num_parents(hw);
	u32 ns, reg;
	int bank;
	int i, ret;
	struct src_sel *s;

	ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	if (ret)
		goto err;
	bank = reg_to_bank(rcg, reg);
	s = &rcg->s[bank];

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
	if (ret)
		goto err;
	ns = ns_to_src(s, ns);

	for (i = 0; i < num_parents; i++)
		if (ns == s->parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, clk_hw_get_name(hw));
	return 0;
}

static int clk_rcg_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 ns;

	regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	ns = src_to_ns(&rcg->s, rcg->s.parent_map[index].cfg, ns);
	regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);

	return 0;
}

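/*
 * Bit-field helpers for the MD, NS and mode/enable registers. M is stored
 * directly in the MD register, while N is stored in the NS register as
 * ~(N - M), so ns_m_to_n() needs the decoded M value to recover N. As an
 * illustrative example with an 8-bit field: m = 1, n = 3 is written as
 * ~(3 - 1) & 0xff = 0xfd and read back as (~0xfd & 0xff) + 1 = 3.
 */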
static u32 md_to_m(struct mn *mn, u32 md)
{
	md >>= mn->m_val_shift;
	md &= BIT(mn->width) - 1;
	return md;
}

static u32 ns_to_pre_div(struct pre_div *p, u32 ns)
{
	ns >>= p->pre_div_shift;
	ns &= BIT(p->pre_div_width) - 1;
	return ns;
}

static u32 pre_div_to_ns(struct pre_div *p, u8 pre_div, u32 ns)
{
	u32 mask;

	mask = BIT(p->pre_div_width) - 1;
	mask <<= p->pre_div_shift;
	ns &= ~mask;

	ns |= pre_div << p->pre_div_shift;
	return ns;
}

static u32 mn_to_md(struct mn *mn, u32 m, u32 n, u32 md)
{
	u32 mask, mask_w;

	mask_w = BIT(mn->width) - 1;
	mask = (mask_w << mn->m_val_shift) | mask_w;
	md &= ~mask;

	if (n) {
		m <<= mn->m_val_shift;
		md |= m;
		md |= ~n & mask_w;
	}

	return md;
}

static u32 ns_m_to_n(struct mn *mn, u32 ns, u32 m)
{
	ns = ~ns >> mn->n_val_shift;
	ns &= BIT(mn->width) - 1;
	return ns + m;
}

static u32 reg_to_mnctr_mode(struct mn *mn, u32 val)
{
	val >>= mn->mnctr_mode_shift;
	val &= MNCTR_MODE_MASK;
	return val;
}

static u32 mn_to_ns(struct mn *mn, u32 m, u32 n, u32 ns)
{
	u32 mask;

	mask = BIT(mn->width) - 1;
	mask <<= mn->n_val_shift;
	ns &= ~mask;

	if (n) {
		n = n - m;
		n = ~n;
		n &= BIT(mn->width) - 1;
		n <<= mn->n_val_shift;
		ns |= n;
	}

	return ns;
}

static u32 mn_to_reg(struct mn *mn, u32 m, u32 n, u32 val)
{
	u32 mask;

	mask = MNCTR_MODE_MASK << mn->mnctr_mode_shift;
	mask |= BIT(mn->mnctr_en_bit);
	val &= ~mask;

	if (n) {
		val |= BIT(mn->mnctr_en_bit);
		val |= MNCTR_MODE_DUAL << mn->mnctr_mode_shift;
	}

	return val;
}

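/*
 * Program the M/N counter, pre-divider and source select of one register
 * bank. If the clock is enabled the currently unused bank is programmed and
 * the mux select bit is toggled at the end so the switch is glitch free; if
 * the clock is disabled the active bank is reprogrammed in place.
 */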
static int configure_bank(struct clk_dyn_rcg *rcg, const struct freq_tbl *f)
{
	u32 ns, md, reg;
	int bank, new_bank, ret, index;
	struct mn *mn;
	struct pre_div *p;
	struct src_sel *s;
	bool enabled;
	u32 md_reg, ns_reg;
	bool banked_mn = !!rcg->mn[1].width;
	bool banked_p = !!rcg->p[1].pre_div_width;
	struct clk_hw *hw = &rcg->clkr.hw;

	enabled = __clk_is_enabled(hw->clk);

	ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	if (ret)
		return ret;
	bank = reg_to_bank(rcg, reg);
	new_bank = enabled ? !bank : bank;

	ns_reg = rcg->ns_reg[new_bank];
	ret = regmap_read(rcg->clkr.regmap, ns_reg, &ns);
	if (ret)
		return ret;

	if (banked_mn) {
		mn = &rcg->mn[new_bank];
		md_reg = rcg->md_reg[new_bank];

		ns |= BIT(mn->mnctr_reset_bit);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;

		ret = regmap_read(rcg->clkr.regmap, md_reg, &md);
		if (ret)
			return ret;
		md = mn_to_md(mn, f->m, f->n, md);
		ret = regmap_write(rcg->clkr.regmap, md_reg, md);
		if (ret)
			return ret;
		ns = mn_to_ns(mn, f->m, f->n, ns);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;

		/* Two NS registers means mode control is in NS register */
		if (rcg->ns_reg[0] != rcg->ns_reg[1]) {
			ns = mn_to_reg(mn, f->m, f->n, ns);
			ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
			if (ret)
				return ret;
		} else {
			reg = mn_to_reg(mn, f->m, f->n, reg);
			ret = regmap_write(rcg->clkr.regmap, rcg->bank_reg,
					   reg);
			if (ret)
				return ret;
		}

		ns &= ~BIT(mn->mnctr_reset_bit);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;
	}

	if (banked_p) {
		p = &rcg->p[new_bank];
		ns = pre_div_to_ns(p, f->pre_div - 1, ns);
	}

	s = &rcg->s[new_bank];
	index = qcom_find_src_index(hw, s->parent_map, f->src);
	if (index < 0)
		return index;
	ns = src_to_ns(s, s->parent_map[index].cfg, ns);
	ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
	if (ret)
		return ret;

	if (enabled) {
		ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
		if (ret)
			return ret;
		reg ^= BIT(rcg->mux_sel_bit);
		ret = regmap_write(rcg->clkr.regmap, rcg->bank_reg, reg);
		if (ret)
			return ret;
	}
	return 0;
}

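/*
 * Switching parents reuses configure_bank(): the current M/N and pre-divider
 * settings are read back from the active bank so that only the source
 * changes.
 */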
static int clk_dyn_rcg_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	u32 ns, md, reg;
	int bank;
	struct freq_tbl f = { 0 };
	bool banked_mn = !!rcg->mn[1].width;
	bool banked_p = !!rcg->p[1].pre_div_width;

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);

	regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);

	if (banked_mn) {
		regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
		f.m = md_to_m(&rcg->mn[bank], md);
		f.n = ns_m_to_n(&rcg->mn[bank], ns, f.m);
	}

	if (banked_p)
		f.pre_div = ns_to_pre_div(&rcg->p[bank], ns) + 1;

	f.src = qcom_find_src_index(hw, rcg->s[bank].parent_map, index);
	return configure_bank(rcg, &f);
}

/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            pre_div       n
 */
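/*
 * For example (illustrative values): parent_rate = 19200000 with a pre_div
 * field of 3 (divide by 4) and m/n = 1/3 with a non-zero counter mode gives
 * 19200000 / 4 * 1 / 3 = 1600000.
 */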
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 pre_div)
{
	if (pre_div)
		rate /= pre_div + 1;

	if (mode) {
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}

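/*
 * Recalculate the rate by decoding the pre-divider, M/N values and counter
 * mode from the hardware registers.
 */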
static unsigned long
clk_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 pre_div, m = 0, n = 0, ns, md, mode = 0;
	struct mn *mn = &rcg->mn;

	regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	pre_div = ns_to_pre_div(&rcg->p, ns);

	if (rcg->mn.width) {
		regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
		m = md_to_m(mn, md);
		n = ns_m_to_n(mn, ns, m);
		/* MN counter mode is in hw.enable_reg sometimes */
		if (rcg->clkr.enable_reg != rcg->ns_reg)
			regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &mode);
		else
			mode = ns;
		mode = reg_to_mnctr_mode(mn, mode);
	}

	return calc_rate(parent_rate, m, n, mode, pre_div);
}

static unsigned long
clk_dyn_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	u32 m, n, pre_div, ns, md, mode, reg;
	int bank;
	struct mn *mn;
	bool banked_p = !!rcg->p[1].pre_div_width;
	bool banked_mn = !!rcg->mn[1].width;

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);

	regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
	m = n = pre_div = mode = 0;

	if (banked_mn) {
		mn = &rcg->mn[bank];
		regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
		m = md_to_m(mn, md);
		n = ns_m_to_n(mn, ns, m);
		/* Two NS registers means mode control is in NS register */
		if (rcg->ns_reg[0] != rcg->ns_reg[1])
			reg = ns;
		mode = reg_to_mnctr_mode(mn, reg);
	}

	if (banked_p)
		pre_div = ns_to_pre_div(&rcg->p[bank], ns);

	return calc_rate(parent_rate, m, n, mode, pre_div);
}

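/*
 * Look up the frequency table entry for the requested rate and report the
 * matching parent. With CLK_SET_RATE_PARENT the required parent rate is
 * derived from the requested rate (rate * pre_div * n / m); otherwise the
 * parent's current rate is used.
 */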
static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
		struct clk_rate_request *req,
		const struct parent_map *parent_map)
{
	unsigned long clk_flags, rate = req->rate;
	struct clk_hw *p;
	int index;

	f = qcom_find_freq(f, rate);
	if (!f)
		return -EINVAL;

	index = qcom_find_src_index(hw, parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = clk_hw_get_flags(hw);
	p = clk_hw_get_parent_by_index(hw, index);
	if (clk_flags & CLK_SET_RATE_PARENT) {
		rate = rate * f->pre_div;
		if (f->n) {
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate = clk_hw_get_rate(p);
	}
	req->best_parent_hw = p;
	req->best_parent_rate = rate;
	req->rate = f->freq;

	return 0;
}

static int clk_rcg_determine_rate(struct clk_hw *hw,
				  struct clk_rate_request *req)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req,
					rcg->s.parent_map);
}

static int clk_dyn_rcg_determine_rate(struct clk_hw *hw,
				      struct clk_rate_request *req)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	u32 reg;
	int bank;
	struct src_sel *s;

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);
	s = &rcg->s[bank];

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, s->parent_map);
}

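/*
 * Bypass clocks follow their parent: the first frequency table entry only
 * selects the source and divider settings to program, and the output rate is
 * whatever the parent rounds the request to.
 */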
static int clk_rcg_bypass_determine_rate(struct clk_hw *hw,
					 struct clk_rate_request *req)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	struct clk_hw *p;
	int index = qcom_find_src_index(hw, rcg->s.parent_map, f->src);

	req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_round_rate(p, req->rate);
	req->rate = req->best_parent_rate;

	return 0;
}

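/*
 * Program a frequency table entry: hold the M/N counter in reset while the
 * MD/NS registers and the counter mode/enable bits are updated, then release
 * the reset. The reset bit lives either in the NS register or in the clock
 * control register, depending on reset_in_cc.
 */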
static int __clk_rcg_set_rate(struct clk_rcg *rcg, const struct freq_tbl *f)
{
	u32 ns, md, ctl;
	struct mn *mn = &rcg->mn;
	u32 mask = 0;
	unsigned int reset_reg;

	if (rcg->mn.reset_in_cc)
		reset_reg = rcg->clkr.enable_reg;
	else
		reset_reg = rcg->ns_reg;

	if (rcg->mn.width) {
		mask = BIT(mn->mnctr_reset_bit);
		regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, mask);

		regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
		md = mn_to_md(mn, f->m, f->n, md);
		regmap_write(rcg->clkr.regmap, rcg->md_reg, md);

		regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
		/* MN counter mode is in hw.enable_reg sometimes */
		if (rcg->clkr.enable_reg != rcg->ns_reg) {
			regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &ctl);
			ctl = mn_to_reg(mn, f->m, f->n, ctl);
			regmap_write(rcg->clkr.regmap, rcg->clkr.enable_reg, ctl);
		} else {
			ns = mn_to_reg(mn, f->m, f->n, ns);
		}
		ns = mn_to_ns(mn, f->m, f->n, ns);
	} else {
		regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	}

	ns = pre_div_to_ns(&rcg->p, f->pre_div - 1, ns);
	regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);

	regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, 0);

	return 0;
}

static int clk_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	return __clk_rcg_set_rate(rcg, f);
}

static int clk_rcg_set_floor_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq_floor(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	return __clk_rcg_set_rate(rcg, f);
}

static int clk_rcg_bypass_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);

	return __clk_rcg_set_rate(rcg, rcg->freq_tbl);
}

static int clk_rcg_bypass2_determine_rate(struct clk_hw *hw,
				struct clk_rate_request *req)
{
	struct clk_hw *p;

	p = req->best_parent_hw;
	req->best_parent_rate = clk_hw_round_rate(p, req->rate);
	req->rate = req->best_parent_rate;

	return 0;
}

static int clk_rcg_bypass2_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	struct freq_tbl f = { 0 };
	u32 ns, src;
	int i, ret, num_parents = clk_hw_get_num_parents(hw);

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	if (ret)
		return ret;

	src = ns_to_src(&rcg->s, ns);
	f.pre_div = ns_to_pre_div(&rcg->p, ns) + 1;

	for (i = 0; i < num_parents; i++) {
		if (src == rcg->s.parent_map[i].cfg) {
			f.src = rcg->s.parent_map[i].src;
			return __clk_rcg_set_rate(rcg, &f);
		}
	}

	return -EINVAL;
}

static int clk_rcg_bypass2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Read the hardware to determine parent during set_rate */
	return clk_rcg_bypass2_set_rate(hw, rate, parent_rate);
}

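/*
 * Pixel clocks only support a fixed set of M/N fractions of their parent.
 * Rate requests are matched against pixel_table and accepted when the parent
 * can supply the required source rate to within 100 kHz; the pre-divider is
 * bypassed and the fraction is programmed into the M/N counter.
 */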
struct frac_entry {
	int num;
	int den;
};

static const struct frac_entry pixel_table[] = {
	{ 1, 2 },
	{ 1, 3 },
	{ 3, 16 },
	{ }
};

static int clk_rcg_pixel_determine_rate(struct clk_hw *hw,
		struct clk_rate_request *req)
{
	int delta = 100000;
	const struct frac_entry *frac = pixel_table;
	unsigned long request, src_rate;

	for (; frac->num; frac++) {
		request = (req->rate * frac->den) / frac->num;

		src_rate = clk_hw_round_rate(req->best_parent_hw, request);

		if ((src_rate < (request - delta)) ||
			(src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}

static int clk_rcg_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	int delta = 100000;
	const struct frac_entry *frac = pixel_table;
	unsigned long request;
	struct freq_tbl f = { 0 };
	u32 ns, src;
	int i, ret, num_parents = clk_hw_get_num_parents(hw);

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	if (ret)
		return ret;

	src = ns_to_src(&rcg->s, ns);

	for (i = 0; i < num_parents; i++) {
		if (src == rcg->s.parent_map[i].cfg) {
			f.src = rcg->s.parent_map[i].src;
			break;
		}
	}

	/* bypass the pre divider */
	f.pre_div = 1;

	/* let us find appropriate m/n values for this */
	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
			(parent_rate > (request + delta)))
			continue;

		f.m = frac->num;
		f.n = frac->den;

		return __clk_rcg_set_rate(rcg, &f);
	}

	return -EINVAL;
}

static int clk_rcg_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg_pixel_set_rate(hw, rate, parent_rate);
}

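/*
 * ESC clocks use only the pre-divider: the integer ratio parent_rate / rate
 * is programmed as the divider, provided it fits in the pre-divider field,
 * and the M/N counter is left unconfigured.
 */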
static int clk_rcg_esc_determine_rate(struct clk_hw *hw,
		struct clk_rate_request *req)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	int pre_div_max = BIT(rcg->p.pre_div_width);
	int div;
	unsigned long src_rate;

	if (req->rate == 0)
		return -EINVAL;

	src_rate = clk_hw_get_rate(req->best_parent_hw);

	div = src_rate / req->rate;

	if (div >= 1 && div <= pre_div_max) {
		req->best_parent_rate = src_rate;
		req->rate = src_rate / div;
		return 0;
	}

	return -EINVAL;
}

static int clk_rcg_esc_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	struct freq_tbl f = { 0 };
	int pre_div_max = BIT(rcg->p.pre_div_width);
	int div;
	u32 ns;
	int i, ret, num_parents = clk_hw_get_num_parents(hw);

	if (rate == 0)
		return -EINVAL;

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	if (ret)
		return ret;

	ns = ns_to_src(&rcg->s, ns);

	for (i = 0; i < num_parents; i++) {
		if (ns == rcg->s.parent_map[i].cfg) {
			f.src = rcg->s.parent_map[i].src;
			break;
		}
	}

	div = parent_rate / rate;

	if (div >= 1 && div <= pre_div_max) {
		f.pre_div = div;
		return __clk_rcg_set_rate(rcg, &f);
	}

	return -EINVAL;
}

static int clk_rcg_esc_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg_esc_set_rate(hw, rate, parent_rate);
}

/*
 * This type of clock has a glitch-free mux that switches between the output of
 * the M/N counter and an always on clock source (XO). When clk_set_rate() is
 * called we need to make sure that we don't switch to the M/N counter if it
 * isn't clocking because the mux will get stuck and the clock will stop
 * outputting a clock. This can happen if the framework isn't aware that this
 * clock is on and so clk_set_rate() doesn't turn on the new parent. To fix
 * this we switch the mux in the enable/disable ops and reprogram the M/N
 * counter in the set_rate op. We also make sure to switch away from the M/N
 * counter in set_rate if software thinks the clock is off.
 */
static int clk_rcg_lcc_set_rate(struct clk_hw *hw, unsigned long rate,
				unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	const struct freq_tbl *f;
	int ret;
	u32 gfm = BIT(10);

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	/* Switch to XO to avoid glitches */
	regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, 0);
	ret = __clk_rcg_set_rate(rcg, f);
	/* Switch back to M/N if it's clocking */
	if (__clk_is_enabled(hw->clk))
		regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, gfm);

	return ret;
}

static int clk_rcg_lcc_enable(struct clk_hw *hw)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 gfm = BIT(10);

	/* Use M/N */
	return regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, gfm);
}

static void clk_rcg_lcc_disable(struct clk_hw *hw)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	u32 gfm = BIT(10);

	/* Use XO */
	regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, 0);
}

static int __clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	return configure_bank(rcg, f);
}

static int clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	return __clk_dyn_rcg_set_rate(hw, rate);
}

static int clk_dyn_rcg_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_dyn_rcg_set_rate(hw, rate);
}

const struct clk_ops clk_rcg_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_ops);

const struct clk_ops clk_rcg_floor_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_set_floor_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_floor_ops);

const struct clk_ops clk_rcg_bypass_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_bypass_determine_rate,
	.set_rate = clk_rcg_bypass_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_bypass_ops);

const struct clk_ops clk_rcg_bypass2_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_bypass2_determine_rate,
	.set_rate = clk_rcg_bypass2_set_rate,
	.set_rate_and_parent = clk_rcg_bypass2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg_bypass2_ops);

const struct clk_ops clk_rcg_pixel_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_pixel_determine_rate,
	.set_rate = clk_rcg_pixel_set_rate,
	.set_rate_and_parent = clk_rcg_pixel_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg_pixel_ops);

const struct clk_ops clk_rcg_esc_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_esc_determine_rate,
	.set_rate = clk_rcg_esc_set_rate,
	.set_rate_and_parent = clk_rcg_esc_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg_esc_ops);

const struct clk_ops clk_rcg_lcc_ops = {
	.enable = clk_rcg_lcc_enable,
	.disable = clk_rcg_lcc_disable,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_lcc_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_lcc_ops);

const struct clk_ops clk_dyn_rcg_ops = {
	.enable = clk_enable_regmap,
	.is_enabled = clk_is_enabled_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_dyn_rcg_get_parent,
	.set_parent = clk_dyn_rcg_set_parent,
	.recalc_rate = clk_dyn_rcg_recalc_rate,
	.determine_rate = clk_dyn_rcg_determine_rate,
	.set_rate = clk_dyn_rcg_set_rate,
	.set_rate_and_parent = clk_dyn_rcg_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_dyn_rcg_ops);
913