xref: /linux/drivers/clk/clk-composite.c (revision 5ea5880764cbb164afb17a62e76ca75dc371409d)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2013 NVIDIA CORPORATION.  All rights reserved.
4  */
5 
6 #include <linux/clk-provider.h>
7 #include <linux/device.h>
8 #include <linux/err.h>
9 #include <linux/slab.h>
10 
11 static u8 clk_composite_get_parent(struct clk_hw *hw)
12 {
13 	struct clk_composite *composite = to_clk_composite(hw);
14 	const struct clk_ops *mux_ops = composite->mux_ops;
15 	struct clk_hw *mux_hw = composite->mux_hw;
16 
17 	__clk_hw_set_clk(mux_hw, hw);
18 
19 	return mux_ops->get_parent(mux_hw);
20 }
21 
22 static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
23 {
24 	struct clk_composite *composite = to_clk_composite(hw);
25 	const struct clk_ops *mux_ops = composite->mux_ops;
26 	struct clk_hw *mux_hw = composite->mux_hw;
27 
28 	__clk_hw_set_clk(mux_hw, hw);
29 
30 	return mux_ops->set_parent(mux_hw, index);
31 }
32 
33 static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
34 					    unsigned long parent_rate)
35 {
36 	struct clk_composite *composite = to_clk_composite(hw);
37 	const struct clk_ops *rate_ops = composite->rate_ops;
38 	struct clk_hw *rate_hw = composite->rate_hw;
39 
40 	__clk_hw_set_clk(rate_hw, hw);
41 
42 	return rate_ops->recalc_rate(rate_hw, parent_rate);
43 }
44 
45 static int clk_composite_determine_rate_for_parent(struct clk_hw *rate_hw,
46 						   struct clk_rate_request *req,
47 						   struct clk_hw *parent_hw,
48 						   const struct clk_ops *rate_ops)
49 {
50 	req->best_parent_hw = parent_hw;
51 	req->best_parent_rate = clk_hw_get_rate(parent_hw);
52 
53 	return rate_ops->determine_rate(rate_hw, req);
54 }
55 
/*
 * Round a rate request for the composite clock.
 *
 * Three strategies, tried in order depending on which components exist:
 *  1. rate component with .determine_rate AND mux with .set_parent:
 *     evaluate the request against candidate parents and pick the one
 *     whose achievable rate is closest to the requested rate.
 *  2. only a rate component with .determine_rate: delegate to it.
 *  3. only a mux with .determine_rate: delegate to it.
 * Returns 0 on success or a negative error code.
 */
static int clk_composite_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	struct clk_hw *parent;
	unsigned long rate_diff;
	unsigned long best_rate_diff = ULONG_MAX;
	unsigned long best_rate = 0;
	int i, ret;

	if (rate_hw && rate_ops && rate_ops->determine_rate &&
	    mux_hw && mux_ops && mux_ops->set_parent) {
		/* No candidate accepted yet; also flags the first loop hit. */
		req->best_parent_hw = NULL;

		if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
			struct clk_rate_request tmp_req;

			/* Reparenting forbidden: only the current parent may
			 * be considered. */
			parent = clk_hw_get_parent(mux_hw);

			clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				return ret;

			/* Copy the rounded result back into the caller's
			 * request. */
			req->rate = tmp_req.rate;
			req->best_parent_hw = tmp_req.best_parent_hw;
			req->best_parent_rate = tmp_req.best_parent_rate;

			return 0;
		}

		/* Try every possible parent and keep the closest match. */
		for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
			struct clk_rate_request tmp_req;

			parent = clk_hw_get_parent_by_index(mux_hw, i);
			if (!parent)
				continue;

			clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				continue; /* parent can't serve this rate */

			/* Absolute distance from the requested rate. */
			if (req->rate >= tmp_req.rate)
				rate_diff = req->rate - tmp_req.rate;
			else
				rate_diff = tmp_req.rate - req->rate;

			/* Accept on exact match, on the first viable parent
			 * (best_parent_hw still NULL), or on improvement. */
			if (!rate_diff || !req->best_parent_hw
				       || best_rate_diff > rate_diff) {
				req->best_parent_hw = parent;
				req->best_parent_rate = tmp_req.best_parent_rate;
				best_rate_diff = rate_diff;
				best_rate = tmp_req.rate;
			}

			/* Exact match: no better candidate can exist. */
			if (!rate_diff)
				return 0;
		}

		req->rate = best_rate;
		return 0;
	} else if (rate_hw && rate_ops && rate_ops->determine_rate) {
		__clk_hw_set_clk(rate_hw, hw);
		return rate_ops->determine_rate(rate_hw, req);
	} else if (mux_hw && mux_ops && mux_ops->determine_rate) {
		__clk_hw_set_clk(mux_hw, hw);
		return mux_ops->determine_rate(mux_hw, req);
	} else {
		/* Registration should never have installed this op without
		 * at least one usable component. */
		pr_err("clk: clk_composite_determine_rate function called, but no mux or rate callback set!\n");
		return -EINVAL;
	}
}
139 
140 static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
141 			       unsigned long parent_rate)
142 {
143 	struct clk_composite *composite = to_clk_composite(hw);
144 	const struct clk_ops *rate_ops = composite->rate_ops;
145 	struct clk_hw *rate_hw = composite->rate_hw;
146 
147 	__clk_hw_set_clk(rate_hw, hw);
148 
149 	return rate_ops->set_rate(rate_hw, rate, parent_rate);
150 }
151 
/*
 * Change rate and parent in one operation, ordering the two hardware
 * writes so the output never overshoots the requested rate during the
 * transition.
 */
static int clk_composite_set_rate_and_parent(struct clk_hw *hw,
					     unsigned long rate,
					     unsigned long parent_rate,
					     u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	unsigned long temp_rate;

	__clk_hw_set_clk(rate_hw, hw);
	__clk_hw_set_clk(mux_hw, hw);

	/*
	 * Rate the current divider setting would yield under the new
	 * parent's rate.  If that would exceed the target, lower the rate
	 * first and reparent after; otherwise reparent first so the rate
	 * change is computed against the new parent.
	 */
	temp_rate = rate_ops->recalc_rate(rate_hw, parent_rate);
	if (temp_rate > rate) {
		rate_ops->set_rate(rate_hw, rate, parent_rate);
		mux_ops->set_parent(mux_hw, index);
	} else {
		mux_ops->set_parent(mux_hw, index);
		rate_ops->set_rate(rate_hw, rate, parent_rate);
	}

	return 0;
}
178 
179 static int clk_composite_is_enabled(struct clk_hw *hw)
180 {
181 	struct clk_composite *composite = to_clk_composite(hw);
182 	const struct clk_ops *gate_ops = composite->gate_ops;
183 	struct clk_hw *gate_hw = composite->gate_hw;
184 
185 	__clk_hw_set_clk(gate_hw, hw);
186 
187 	return gate_ops->is_enabled(gate_hw);
188 }
189 
190 static int clk_composite_enable(struct clk_hw *hw)
191 {
192 	struct clk_composite *composite = to_clk_composite(hw);
193 	const struct clk_ops *gate_ops = composite->gate_ops;
194 	struct clk_hw *gate_hw = composite->gate_hw;
195 
196 	__clk_hw_set_clk(gate_hw, hw);
197 
198 	return gate_ops->enable(gate_hw);
199 }
200 
201 static void clk_composite_disable(struct clk_hw *hw)
202 {
203 	struct clk_composite *composite = to_clk_composite(hw);
204 	const struct clk_ops *gate_ops = composite->gate_ops;
205 	struct clk_hw *gate_hw = composite->gate_hw;
206 
207 	__clk_hw_set_clk(gate_hw, hw);
208 
209 	gate_ops->disable(gate_hw);
210 }
211 
/*
 * Common worker for registering a composite clock built from optional
 * mux, rate and gate components.  Exactly one of @parent_names /
 * @pdata supplies the parent list.  Builds a per-instance clk_ops
 * table that forwards each op to the matching component, registers the
 * clk_hw and returns it, or an ERR_PTR on failure.
 */
static struct clk_hw *__clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;
	/* Stack-allocated: the clk core consumes init during registration. */
	struct clk_init_data init = {};
	struct clk_composite *composite;
	struct clk_ops *clk_composite_ops;
	int ret;

	/* Zero-initialized, so unset component pointers/ops stay NULL. */
	composite = kzalloc_obj(*composite);
	if (!composite)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = flags;
	if (parent_names)
		init.parent_names = parent_names;
	else
		init.parent_data = pdata;
	init.num_parents = num_parents;
	hw = &composite->hw;

	/* Per-instance ops table, populated only for supplied components. */
	clk_composite_ops = &composite->ops;

	if (mux_hw && mux_ops) {
		/* A mux without get_parent is unusable. */
		if (!mux_ops->get_parent) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->mux_hw = mux_hw;
		composite->mux_ops = mux_ops;
		clk_composite_ops->get_parent = clk_composite_get_parent;
		if (mux_ops->set_parent)
			clk_composite_ops->set_parent = clk_composite_set_parent;
		if (mux_ops->determine_rate)
			clk_composite_ops->determine_rate = clk_composite_determine_rate;
	}

	if (rate_hw && rate_ops) {
		/* A rate component without recalc_rate is unusable. */
		if (!rate_ops->recalc_rate) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}
		clk_composite_ops->recalc_rate = clk_composite_recalc_rate;

		/* May override a determine_rate installed for the mux. */
		if (rate_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_composite_determine_rate;

		/* .set_rate requires .determine_rate */
		if (rate_ops->set_rate) {
			if (rate_ops->determine_rate)
				clk_composite_ops->set_rate =
						clk_composite_set_rate;
			else
				WARN(1, "%s: missing determine_rate op is required\n",
						__func__);
		}

		composite->rate_hw = rate_hw;
		composite->rate_ops = rate_ops;
	}

	/* Combined op only when both components can take part. */
	if (mux_hw && mux_ops && rate_hw && rate_ops) {
		if (mux_ops->set_parent && rate_ops->set_rate)
			clk_composite_ops->set_rate_and_parent =
			clk_composite_set_rate_and_parent;
	}

	if (gate_hw && gate_ops) {
		/* The gate must support the full enable/disable/query set. */
		if (!gate_ops->is_enabled || !gate_ops->enable ||
		    !gate_ops->disable) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->gate_hw = gate_hw;
		composite->gate_ops = gate_ops;
		clk_composite_ops->is_enabled = clk_composite_is_enabled;
		clk_composite_ops->enable = clk_composite_enable;
		clk_composite_ops->disable = clk_composite_disable;
	}

	init.ops = clk_composite_ops;
	composite->hw.init = &init;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		hw = ERR_PTR(ret);
		goto err;
	}

	/* Let each component's ops resolve back to the composite's clk. */
	if (composite->mux_hw)
		composite->mux_hw->clk = hw->clk;

	if (composite->rate_hw)
		composite->rate_hw->clk = hw->clk;

	if (composite->gate_hw)
		composite->gate_hw->clk = hw->clk;

	return hw;

err:
	kfree(composite);
	return hw;
}
325 
326 struct clk_hw *clk_hw_register_composite(struct device *dev, const char *name,
327 			const char * const *parent_names, int num_parents,
328 			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
329 			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
330 			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
331 			unsigned long flags)
332 {
333 	return __clk_hw_register_composite(dev, name, parent_names, NULL,
334 					   num_parents, mux_hw, mux_ops,
335 					   rate_hw, rate_ops, gate_hw,
336 					   gate_ops, flags);
337 }
338 EXPORT_SYMBOL_GPL(clk_hw_register_composite);
339 
340 struct clk_hw *clk_hw_register_composite_pdata(struct device *dev,
341 			const char *name,
342 			const struct clk_parent_data *parent_data,
343 			int num_parents,
344 			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
345 			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
346 			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
347 			unsigned long flags)
348 {
349 	return __clk_hw_register_composite(dev, name, NULL, parent_data,
350 					   num_parents, mux_hw, mux_ops,
351 					   rate_hw, rate_ops, gate_hw,
352 					   gate_ops, flags);
353 }
354 
355 struct clk *clk_register_composite(struct device *dev, const char *name,
356 			const char * const *parent_names, int num_parents,
357 			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
358 			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
359 			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
360 			unsigned long flags)
361 {
362 	struct clk_hw *hw;
363 
364 	hw = clk_hw_register_composite(dev, name, parent_names, num_parents,
365 			mux_hw, mux_ops, rate_hw, rate_ops, gate_hw, gate_ops,
366 			flags);
367 	if (IS_ERR(hw))
368 		return ERR_CAST(hw);
369 	return hw->clk;
370 }
371 EXPORT_SYMBOL_GPL(clk_register_composite);
372 
373 struct clk *clk_register_composite_pdata(struct device *dev, const char *name,
374 			const struct clk_parent_data *parent_data,
375 			int num_parents,
376 			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
377 			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
378 			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
379 			unsigned long flags)
380 {
381 	struct clk_hw *hw;
382 
383 	hw = clk_hw_register_composite_pdata(dev, name, parent_data,
384 			num_parents, mux_hw, mux_ops, rate_hw, rate_ops,
385 			gate_hw, gate_ops, flags);
386 	if (IS_ERR(hw))
387 		return ERR_CAST(hw);
388 	return hw->clk;
389 }
390 
/*
 * Unregister a composite clock registered via clk_register_composite()
 * and free the container allocated at registration time.
 */
void clk_unregister_composite(struct clk *clk)
{
	struct clk_hw *hw = __clk_get_hw(clk);
	struct clk_composite *composite;

	if (!hw)
		return;

	/* Grab the container before the clk is torn down. */
	composite = to_clk_composite(hw);
	clk_unregister(clk);
	kfree(composite);
}
405 
/*
 * Unregister a composite clock registered via clk_hw_register_composite()
 * and free the container allocated at registration time.
 */
void clk_hw_unregister_composite(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);

	clk_hw_unregister(hw);
	kfree(composite);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_composite);
416 
/* devres release callback: tear down the managed composite clock. */
static void devm_clk_hw_release_composite(struct device *dev, void *res)
{
	struct clk_hw *hw = *(struct clk_hw **)res;

	clk_hw_unregister_composite(hw);
}
421 
422 static struct clk_hw *__devm_clk_hw_register_composite(struct device *dev,
423 			const char *name, const char * const *parent_names,
424 			const struct clk_parent_data *pdata, int num_parents,
425 			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
426 			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
427 			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
428 			unsigned long flags)
429 {
430 	struct clk_hw **ptr, *hw;
431 
432 	ptr = devres_alloc(devm_clk_hw_release_composite, sizeof(*ptr),
433 			   GFP_KERNEL);
434 	if (!ptr)
435 		return ERR_PTR(-ENOMEM);
436 
437 	hw = __clk_hw_register_composite(dev, name, parent_names, pdata,
438 					 num_parents, mux_hw, mux_ops, rate_hw,
439 					 rate_ops, gate_hw, gate_ops, flags);
440 
441 	if (!IS_ERR(hw)) {
442 		*ptr = hw;
443 		devres_add(dev, ptr);
444 	} else {
445 		devres_free(ptr);
446 	}
447 
448 	return hw;
449 }
450 
451 struct clk_hw *devm_clk_hw_register_composite_pdata(struct device *dev,
452 			const char *name,
453 			const struct clk_parent_data *parent_data,
454 			int num_parents,
455 			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
456 			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
457 			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
458 			unsigned long flags)
459 {
460 	return __devm_clk_hw_register_composite(dev, name, NULL, parent_data,
461 						num_parents, mux_hw, mux_ops,
462 						rate_hw, rate_ops, gate_hw,
463 						gate_ops, flags);
464 }
465