Lines Matching full:core (identifier search results from the Linux kernel's common clock framework core, drivers/clk/clk.c; each entry shows the source line, the matching code, and the enclosing function)

54 	struct clk_core		*core;  member
103 struct clk_core *core; member
114 static int clk_pm_runtime_get(struct clk_core *core) in clk_pm_runtime_get() argument
116 if (!core->rpm_enabled) in clk_pm_runtime_get()
119 return pm_runtime_resume_and_get(core->dev); in clk_pm_runtime_get()
122 static void clk_pm_runtime_put(struct clk_core *core) in clk_pm_runtime_put() argument
124 if (!core->rpm_enabled) in clk_pm_runtime_put()
127 pm_runtime_put_sync(core->dev); in clk_pm_runtime_put()
148 struct clk_core *core, *failed; in clk_pm_runtime_get_all() local
161 hlist_for_each_entry(core, &clk_rpm_list, rpm_node) { in clk_pm_runtime_get_all()
162 ret = clk_pm_runtime_get(core); in clk_pm_runtime_get_all()
164 failed = core; in clk_pm_runtime_get_all()
174 hlist_for_each_entry(core, &clk_rpm_list, rpm_node) { in clk_pm_runtime_get_all()
175 if (core == failed) in clk_pm_runtime_get_all()
178 clk_pm_runtime_put(core); in clk_pm_runtime_get_all()
193 struct clk_core *core; in clk_pm_runtime_put_all() local
195 hlist_for_each_entry(core, &clk_rpm_list, rpm_node) in clk_pm_runtime_put_all()
196 clk_pm_runtime_put(core); in clk_pm_runtime_put_all()
200 static void clk_pm_runtime_init(struct clk_core *core) in clk_pm_runtime_init() argument
202 struct device *dev = core->dev; in clk_pm_runtime_init()
205 core->rpm_enabled = true; in clk_pm_runtime_init()
208 hlist_add_head(&core->rpm_node, &clk_rpm_list); in clk_pm_runtime_init()
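The clk_pm_runtime_get()/clk_pm_runtime_put() matches above show how the clk core keeps a provider's device runtime-resumed only while its registers are actually touched, and only when the clk has a device (core->rpm_enabled). A minimal sketch of the same guard pattern outside the clk core, with a hypothetical my_hw type and register layout:

#include <linux/pm_runtime.h>
#include <linux/io.h>
#include <linux/types.h>

struct my_hw {
	struct device *dev;	/* NULL if the block needs no runtime PM */
	void __iomem *base;
};

/* Read a register only while the device is guaranteed to be powered. */
static int my_hw_read(struct my_hw *hw, u32 offset, u32 *val)
{
	int ret;

	if (hw->dev) {				/* mirrors core->rpm_enabled */
		ret = pm_runtime_resume_and_get(hw->dev);
		if (ret < 0)
			return ret;
	}

	*val = readl(hw->base + offset);

	if (hw->dev)
		pm_runtime_put_sync(hw->dev);

	return 0;
}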
282 static bool clk_core_rate_is_protected(struct clk_core *core) in clk_core_rate_is_protected() argument
284 return core->protect_count; in clk_core_rate_is_protected()
287 static bool clk_core_is_prepared(struct clk_core *core) in clk_core_is_prepared() argument
295 if (!core->ops->is_prepared) in clk_core_is_prepared()
296 return core->prepare_count; in clk_core_is_prepared()
298 if (!clk_pm_runtime_get(core)) { in clk_core_is_prepared()
299 ret = core->ops->is_prepared(core->hw); in clk_core_is_prepared()
300 clk_pm_runtime_put(core); in clk_core_is_prepared()
306 static bool clk_core_is_enabled(struct clk_core *core) in clk_core_is_enabled() argument
314 if (!core->ops->is_enabled) in clk_core_is_enabled()
315 return core->enable_count; in clk_core_is_enabled()
327 if (core->rpm_enabled) { in clk_core_is_enabled()
328 pm_runtime_get_noresume(core->dev); in clk_core_is_enabled()
329 if (!pm_runtime_active(core->dev)) { in clk_core_is_enabled()
340 if ((core->flags & CLK_OPS_PARENT_ENABLE) && core->parent) in clk_core_is_enabled()
341 if (!clk_core_is_enabled(core->parent)) { in clk_core_is_enabled()
346 ret = core->ops->is_enabled(core->hw); in clk_core_is_enabled()
348 if (core->rpm_enabled) in clk_core_is_enabled()
349 pm_runtime_put(core->dev); in clk_core_is_enabled()
358 return !clk ? NULL : clk->core->name; in __clk_get_name()
364 return hw->core->name; in clk_hw_get_name()
370 return !clk ? NULL : clk->core->hw; in __clk_get_hw()
376 return hw->core->num_parents; in clk_hw_get_num_parents()
382 return hw->core->parent ? hw->core->parent->hw : NULL; in clk_hw_get_parent()
387 struct clk_core *core) in __clk_lookup_subtree() argument
392 if (!strcmp(core->name, name)) in __clk_lookup_subtree()
393 return core; in __clk_lookup_subtree()
395 hlist_for_each_entry(child, &core->children, child_node) { in __clk_lookup_subtree()
450 * @core: clk to find parent of
484 static struct clk_core *clk_core_get(struct clk_core *core, u8 p_index) in clk_core_get() argument
486 const char *name = core->parents[p_index].fw_name; in clk_core_get()
487 int index = core->parents[p_index].index; in clk_core_get()
489 struct device *dev = core->dev; in clk_core_get()
491 struct device_node *np = core->of_node; in clk_core_get()
512 return hw->core; in clk_core_get()
515 static void clk_core_fill_parent_index(struct clk_core *core, u8 index) in clk_core_fill_parent_index() argument
517 struct clk_parent_map *entry = &core->parents[index]; in clk_core_fill_parent_index()
521 parent = entry->hw->core; in clk_core_fill_parent_index()
523 parent = clk_core_get(core, index); in clk_core_fill_parent_index()
538 entry->core = parent; in clk_core_fill_parent_index()
541 static struct clk_core *clk_core_get_parent_by_index(struct clk_core *core, in clk_core_get_parent_by_index() argument
544 if (!core || index >= core->num_parents || !core->parents) in clk_core_get_parent_by_index()
547 if (!core->parents[index].core) in clk_core_get_parent_by_index()
548 clk_core_fill_parent_index(core, index); in clk_core_get_parent_by_index()
550 return core->parents[index].core; in clk_core_get_parent_by_index()
558 parent = clk_core_get_parent_by_index(hw->core, index); in clk_hw_get_parent_by_index()
566 return !clk ? 0 : clk->core->enable_count; in __clk_get_enable_count()
569 static unsigned long clk_core_get_rate_nolock(struct clk_core *core) in clk_core_get_rate_nolock() argument
571 if (!core) in clk_core_get_rate_nolock()
574 if (!core->num_parents || core->parent) in clk_core_get_rate_nolock()
575 return core->rate; in clk_core_get_rate_nolock()
587 return clk_core_get_rate_nolock(hw->core); in clk_hw_get_rate()
591 static unsigned long clk_core_get_accuracy_no_lock(struct clk_core *core) in clk_core_get_accuracy_no_lock() argument
593 if (!core) in clk_core_get_accuracy_no_lock()
596 return core->accuracy; in clk_core_get_accuracy_no_lock()
601 return hw->core->flags; in clk_hw_get_flags()
607 return clk_core_is_prepared(hw->core); in clk_hw_is_prepared()
613 return clk_core_rate_is_protected(hw->core); in clk_hw_rate_is_protected()
619 return clk_core_is_enabled(hw->core); in clk_hw_is_enabled()
628 return clk_core_is_enabled(clk->core); in __clk_is_enabled()
641 static void clk_core_init_rate_req(struct clk_core * const core,
645 static int clk_core_round_rate_nolock(struct clk_core *core,
648 static bool clk_core_has_parent(struct clk_core *core, const struct clk_core *parent) in clk_core_has_parent() argument
654 if (core->parent == parent) in clk_core_has_parent()
657 for (i = 0; i < core->num_parents; i++) { in clk_core_has_parent()
658 tmp = clk_core_get_parent_by_index(core, i); in clk_core_has_parent()
670 clk_core_forward_rate_req(struct clk_core *core, in clk_core_forward_rate_req() argument
676 if (WARN_ON(!clk_core_has_parent(core, parent))) in clk_core_forward_rate_req()
692 struct clk_core *core = hw->core; in clk_core_determine_rate_no_reparent() local
693 struct clk_core *parent = core->parent; in clk_core_determine_rate_no_reparent()
697 if (core->flags & CLK_SET_RATE_PARENT) { in clk_core_determine_rate_no_reparent()
705 clk_core_forward_rate_req(core, req, parent, &parent_req, in clk_core_determine_rate_no_reparent()
720 best = clk_core_get_rate_nolock(core); in clk_core_determine_rate_no_reparent()
733 struct clk_core *core = hw->core, *parent, *best_parent = NULL; in clk_mux_determine_rate_flags() local
738 if (core->flags & CLK_SET_RATE_NO_REPARENT) in clk_mux_determine_rate_flags()
742 num_parents = core->num_parents; in clk_mux_determine_rate_flags()
746 parent = clk_core_get_parent_by_index(core, i); in clk_mux_determine_rate_flags()
750 if (core->flags & CLK_SET_RATE_PARENT) { in clk_mux_determine_rate_flags()
753 clk_core_forward_rate_req(core, req, parent, &parent_req, req->rate); in clk_mux_determine_rate_flags()
788 struct clk_core *core = clk_core_lookup(name); in __clk_lookup() local
790 return !core ? NULL : core->hw->clk; in __clk_lookup()
793 static void clk_core_get_boundaries(struct clk_core *core, in clk_core_get_boundaries() argument
801 *min_rate = core->min_rate; in clk_core_get_boundaries()
802 *max_rate = core->max_rate; in clk_core_get_boundaries()
804 hlist_for_each_entry(clk_user, &core->clks, clks_node) in clk_core_get_boundaries()
807 hlist_for_each_entry(clk_user, &core->clks, clks_node) in clk_core_get_boundaries()
823 clk_core_get_boundaries(hw->core, min_rate, max_rate); in clk_hw_get_rate_range()
827 static bool clk_core_check_boundaries(struct clk_core *core, in clk_core_check_boundaries() argument
835 if (min_rate > core->max_rate || max_rate < core->min_rate) in clk_core_check_boundaries()
838 hlist_for_each_entry(user, &core->clks, clks_node) in clk_core_check_boundaries()
848 hw->core->min_rate = min_rate; in clk_hw_set_rate_range()
849 hw->core->max_rate = max_rate; in clk_hw_set_rate_range()
899 static void clk_core_rate_unprotect(struct clk_core *core) in clk_core_rate_unprotect() argument
903 if (!core) in clk_core_rate_unprotect()
906 if (WARN(core->protect_count == 0, in clk_core_rate_unprotect()
907 "%s already unprotected\n", core->name)) in clk_core_rate_unprotect()
910 if (--core->protect_count > 0) in clk_core_rate_unprotect()
913 clk_core_rate_unprotect(core->parent); in clk_core_rate_unprotect()
916 static int clk_core_rate_nuke_protect(struct clk_core *core) in clk_core_rate_nuke_protect() argument
922 if (!core) in clk_core_rate_nuke_protect()
925 if (core->protect_count == 0) in clk_core_rate_nuke_protect()
928 ret = core->protect_count; in clk_core_rate_nuke_protect()
929 core->protect_count = 1; in clk_core_rate_nuke_protect()
930 clk_core_rate_unprotect(core); in clk_core_rate_nuke_protect()
967 clk_core_rate_unprotect(clk->core); in clk_rate_exclusive_put()
974 static void clk_core_rate_protect(struct clk_core *core) in clk_core_rate_protect() argument
978 if (!core) in clk_core_rate_protect()
981 if (core->protect_count == 0) in clk_core_rate_protect()
982 clk_core_rate_protect(core->parent); in clk_core_rate_protect()
984 core->protect_count++; in clk_core_rate_protect()
987 static void clk_core_rate_restore_protect(struct clk_core *core, int count) in clk_core_rate_restore_protect() argument
991 if (!core) in clk_core_rate_restore_protect()
997 clk_core_rate_protect(core); in clk_core_rate_restore_protect()
998 core->protect_count = count; in clk_core_rate_restore_protect()
1025 clk_core_rate_protect(clk->core); in clk_rate_exclusive_get()
1052 static void clk_core_unprepare(struct clk_core *core) in clk_core_unprepare() argument
1056 if (!core) in clk_core_unprepare()
1059 if (WARN(core->prepare_count == 0, in clk_core_unprepare()
1060 "%s already unprepared\n", core->name)) in clk_core_unprepare()
1063 if (WARN(core->prepare_count == 1 && core->flags & CLK_IS_CRITICAL, in clk_core_unprepare()
1064 "Unpreparing critical %s\n", core->name)) in clk_core_unprepare()
1067 if (core->flags & CLK_SET_RATE_GATE) in clk_core_unprepare()
1068 clk_core_rate_unprotect(core); in clk_core_unprepare()
1070 if (--core->prepare_count > 0) in clk_core_unprepare()
1073 WARN(core->enable_count > 0, "Unpreparing enabled %s\n", core->name); in clk_core_unprepare()
1075 trace_clk_unprepare(core); in clk_core_unprepare()
1077 if (core->ops->unprepare) in clk_core_unprepare()
1078 core->ops->unprepare(core->hw); in clk_core_unprepare()
1080 trace_clk_unprepare_complete(core); in clk_core_unprepare()
1081 clk_core_unprepare(core->parent); in clk_core_unprepare()
1082 clk_pm_runtime_put(core); in clk_core_unprepare()
1085 static void clk_core_unprepare_lock(struct clk_core *core) in clk_core_unprepare_lock() argument
1088 clk_core_unprepare(core); in clk_core_unprepare_lock()
1108 clk_core_unprepare_lock(clk->core); in clk_unprepare()
1112 static int clk_core_prepare(struct clk_core *core) in clk_core_prepare() argument
1118 if (!core) in clk_core_prepare()
1121 if (core->prepare_count == 0) { in clk_core_prepare()
1122 ret = clk_pm_runtime_get(core); in clk_core_prepare()
1126 ret = clk_core_prepare(core->parent); in clk_core_prepare()
1130 trace_clk_prepare(core); in clk_core_prepare()
1132 if (core->ops->prepare) in clk_core_prepare()
1133 ret = core->ops->prepare(core->hw); in clk_core_prepare()
1135 trace_clk_prepare_complete(core); in clk_core_prepare()
1141 core->prepare_count++; in clk_core_prepare()
1150 if (core->flags & CLK_SET_RATE_GATE) in clk_core_prepare()
1151 clk_core_rate_protect(core); in clk_core_prepare()
1155 clk_core_unprepare(core->parent); in clk_core_prepare()
1157 clk_pm_runtime_put(core); in clk_core_prepare()
1161 static int clk_core_prepare_lock(struct clk_core *core) in clk_core_prepare_lock() argument
1166 ret = clk_core_prepare(core); in clk_core_prepare_lock()
1189 return clk_core_prepare_lock(clk->core); in clk_prepare()
1193 static void clk_core_disable(struct clk_core *core) in clk_core_disable() argument
1197 if (!core) in clk_core_disable()
1200 if (WARN(core->enable_count == 0, "%s already disabled\n", core->name)) in clk_core_disable()
1203 if (WARN(core->enable_count == 1 && core->flags & CLK_IS_CRITICAL, in clk_core_disable()
1204 "Disabling critical %s\n", core->name)) in clk_core_disable()
1207 if (--core->enable_count > 0) in clk_core_disable()
1210 trace_clk_disable(core); in clk_core_disable()
1212 if (core->ops->disable) in clk_core_disable()
1213 core->ops->disable(core->hw); in clk_core_disable()
1215 trace_clk_disable_complete(core); in clk_core_disable()
1217 clk_core_disable(core->parent); in clk_core_disable()
1220 static void clk_core_disable_lock(struct clk_core *core) in clk_core_disable_lock() argument
1225 clk_core_disable(core); in clk_core_disable_lock()
1246 clk_core_disable_lock(clk->core); in clk_disable()
1250 static int clk_core_enable(struct clk_core *core) in clk_core_enable() argument
1256 if (!core) in clk_core_enable()
1259 if (WARN(core->prepare_count == 0, in clk_core_enable()
1260 "Enabling unprepared %s\n", core->name)) in clk_core_enable()
1263 if (core->enable_count == 0) { in clk_core_enable()
1264 ret = clk_core_enable(core->parent); in clk_core_enable()
1269 trace_clk_enable(core); in clk_core_enable()
1271 if (core->ops->enable) in clk_core_enable()
1272 ret = core->ops->enable(core->hw); in clk_core_enable()
1274 trace_clk_enable_complete(core); in clk_core_enable()
1277 clk_core_disable(core->parent); in clk_core_enable()
1282 core->enable_count++; in clk_core_enable()
1286 static int clk_core_enable_lock(struct clk_core *core) in clk_core_enable_lock() argument
1292 ret = clk_core_enable(core); in clk_core_enable_lock()
1310 struct clk_core *core = hw->core; in clk_gate_restore_context() local
1312 if (core->enable_count) in clk_gate_restore_context()
1313 core->ops->enable(hw); in clk_gate_restore_context()
1315 core->ops->disable(hw); in clk_gate_restore_context()
1319 static int clk_core_save_context(struct clk_core *core) in clk_core_save_context() argument
1324 hlist_for_each_entry(child, &core->children, child_node) { in clk_core_save_context()
1330 if (core->ops && core->ops->save_context) in clk_core_save_context()
1331 ret = core->ops->save_context(core->hw); in clk_core_save_context()
1336 static void clk_core_restore_context(struct clk_core *core) in clk_core_restore_context() argument
1340 if (core->ops && core->ops->restore_context) in clk_core_restore_context()
1341 core->ops->restore_context(core->hw); in clk_core_restore_context()
1343 hlist_for_each_entry(child, &core->children, child_node) in clk_core_restore_context()
1383 struct clk_core *core; in clk_restore_context() local
1385 hlist_for_each_entry(core, &clk_root_list, child_node) in clk_restore_context()
1386 clk_core_restore_context(core); in clk_restore_context()
1388 hlist_for_each_entry(core, &clk_orphan_list, child_node) in clk_restore_context()
1389 clk_core_restore_context(core); in clk_restore_context()
1411 return clk_core_enable_lock(clk->core); in clk_enable()
1432 return clk && !(clk->core->ops->enable && clk->core->ops->disable); in clk_is_enabled_when_prepared()
1436 static int clk_core_prepare_enable(struct clk_core *core) in clk_core_prepare_enable() argument
1440 ret = clk_core_prepare_lock(core); in clk_core_prepare_enable()
1444 ret = clk_core_enable_lock(core); in clk_core_prepare_enable()
1446 clk_core_unprepare_lock(core); in clk_core_prepare_enable()
1451 static void clk_core_disable_unprepare(struct clk_core *core) in clk_core_disable_unprepare() argument
1453 clk_core_disable_lock(core); in clk_core_disable_unprepare()
1454 clk_core_unprepare_lock(core); in clk_core_disable_unprepare()
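The prepare/enable entries above are the framework-internal half of the API; a consumer driver normally calls clk_prepare_enable() and clk_disable_unprepare(), which funnel into the clk_core_prepare()/clk_core_enable() helpers listed here. A minimal consumer sketch, assuming a hypothetical device with a clock consumer named "bus" in its DT node:

#include <linux/clk.h>
#include <linux/platform_device.h>

static int my_probe(struct platform_device *pdev)
{
	struct clk *clk;
	int ret;

	/* Look up this device's "bus" clock (hypothetical consumer name). */
	clk = devm_clk_get(&pdev->dev, "bus");
	if (IS_ERR(clk))
		return PTR_ERR(clk);

	/* clk_prepare() may sleep, clk_enable() may not; this does both. */
	ret = clk_prepare_enable(clk);
	if (ret)
		return ret;

	platform_set_drvdata(pdev, clk);
	return 0;
}

static void my_teardown(struct platform_device *pdev)
{
	/* Balance the prepare and enable counts taken above. */
	clk_disable_unprepare(platform_get_drvdata(pdev));
}

Recent kernels also provide devm_clk_get_enabled(), which combines the lookup and clk_prepare_enable() into one managed call.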
1457 static void __init clk_unprepare_unused_subtree(struct clk_core *core) in clk_unprepare_unused_subtree() argument
1463 hlist_for_each_entry(child, &core->children, child_node) in clk_unprepare_unused_subtree()
1466 if (core->prepare_count) in clk_unprepare_unused_subtree()
1469 if (core->flags & CLK_IGNORE_UNUSED) in clk_unprepare_unused_subtree()
1472 if (clk_core_is_prepared(core)) { in clk_unprepare_unused_subtree()
1473 trace_clk_unprepare(core); in clk_unprepare_unused_subtree()
1474 if (core->ops->unprepare_unused) in clk_unprepare_unused_subtree()
1475 core->ops->unprepare_unused(core->hw); in clk_unprepare_unused_subtree()
1476 else if (core->ops->unprepare) in clk_unprepare_unused_subtree()
1477 core->ops->unprepare(core->hw); in clk_unprepare_unused_subtree()
1478 trace_clk_unprepare_complete(core); in clk_unprepare_unused_subtree()
1482 static void __init clk_disable_unused_subtree(struct clk_core *core) in clk_disable_unused_subtree() argument
1489 hlist_for_each_entry(child, &core->children, child_node) in clk_disable_unused_subtree()
1492 if (core->flags & CLK_OPS_PARENT_ENABLE) in clk_disable_unused_subtree()
1493 clk_core_prepare_enable(core->parent); in clk_disable_unused_subtree()
1497 if (core->enable_count) in clk_disable_unused_subtree()
1500 if (core->flags & CLK_IGNORE_UNUSED) in clk_disable_unused_subtree()
1508 if (clk_core_is_enabled(core)) { in clk_disable_unused_subtree()
1509 trace_clk_disable(core); in clk_disable_unused_subtree()
1510 if (core->ops->disable_unused) in clk_disable_unused_subtree()
1511 core->ops->disable_unused(core->hw); in clk_disable_unused_subtree()
1512 else if (core->ops->disable) in clk_disable_unused_subtree()
1513 core->ops->disable(core->hw); in clk_disable_unused_subtree()
1514 trace_clk_disable_complete(core); in clk_disable_unused_subtree()
1519 if (core->flags & CLK_OPS_PARENT_ENABLE) in clk_disable_unused_subtree()
1520 clk_core_disable_unprepare(core->parent); in clk_disable_unused_subtree()
1533 struct clk_core *core; in clk_disable_unused() local
1552 hlist_for_each_entry(core, &clk_root_list, child_node) in clk_disable_unused()
1553 clk_disable_unused_subtree(core); in clk_disable_unused()
1555 hlist_for_each_entry(core, &clk_orphan_list, child_node) in clk_disable_unused()
1556 clk_disable_unused_subtree(core); in clk_disable_unused()
1558 hlist_for_each_entry(core, &clk_root_list, child_node) in clk_disable_unused()
1559 clk_unprepare_unused_subtree(core); in clk_disable_unused()
1561 hlist_for_each_entry(core, &clk_orphan_list, child_node) in clk_disable_unused()
1562 clk_unprepare_unused_subtree(core); in clk_disable_unused()
1572 static int clk_core_determine_round_nolock(struct clk_core *core, in clk_core_determine_round_nolock() argument
1579 if (!core) in clk_core_determine_round_nolock()
1592 __func__, core->name); in clk_core_determine_round_nolock()
1597 * At this point, core protection will be disabled in clk_core_determine_round_nolock()
1602 if (clk_core_rate_is_protected(core)) { in clk_core_determine_round_nolock()
1603 req->rate = core->rate; in clk_core_determine_round_nolock()
1604 } else if (core->ops->determine_rate) { in clk_core_determine_round_nolock()
1605 return core->ops->determine_rate(core->hw, req); in clk_core_determine_round_nolock()
1606 } else if (core->ops->round_rate) { in clk_core_determine_round_nolock()
1607 rate = core->ops->round_rate(core->hw, req->rate, in clk_core_determine_round_nolock()
1620 static void clk_core_init_rate_req(struct clk_core * const core, in clk_core_init_rate_req() argument
1632 if (!core) in clk_core_init_rate_req()
1635 req->core = core; in clk_core_init_rate_req()
1637 clk_core_get_boundaries(core, &req->min_rate, &req->max_rate); in clk_core_init_rate_req()
1639 parent = core->parent; in clk_core_init_rate_req()
1665 clk_core_init_rate_req(hw->core, req, rate); in clk_hw_init_rate_request()
1689 clk_core_forward_rate_req(hw->core, old_req, in clk_hw_forward_rate_request()
1690 parent->core, req, in clk_hw_forward_rate_request()
1695 static bool clk_core_can_round(struct clk_core * const core) in clk_core_can_round() argument
1697 return core->ops->determine_rate || core->ops->round_rate; in clk_core_can_round()
1700 static int clk_core_round_rate_nolock(struct clk_core *core, in clk_core_round_rate_nolock() argument
1707 if (!core) { in clk_core_round_rate_nolock()
1712 if (clk_core_can_round(core)) in clk_core_round_rate_nolock()
1713 return clk_core_determine_round_nolock(core, req); in clk_core_round_rate_nolock()
1715 if (core->flags & CLK_SET_RATE_PARENT) { in clk_core_round_rate_nolock()
1718 clk_core_forward_rate_req(core, req, core->parent, &parent_req, req->rate); in clk_core_round_rate_nolock()
1722 ret = clk_core_round_rate_nolock(core->parent, &parent_req); in clk_core_round_rate_nolock()
1734 req->rate = core->rate; in clk_core_round_rate_nolock()
1752 return clk_core_round_rate_nolock(hw->core, req); in __clk_determine_rate()
1776 clk_core_init_rate_req(hw->core, &req, rate); in clk_hw_round_rate()
1780 ret = clk_core_round_rate_nolock(hw->core, &req); in clk_hw_round_rate()
1810 clk_core_rate_unprotect(clk->core); in clk_round_rate()
1812 clk_core_init_rate_req(clk->core, &req, rate); in clk_round_rate()
1816 ret = clk_core_round_rate_nolock(clk->core, &req); in clk_round_rate()
1821 clk_core_rate_protect(clk->core); in clk_round_rate()
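clk_round_rate(), whose internals appear above, lets a consumer ask what rate the clock would actually run at without changing anything; pairing it with clk_set_rate() avoids surprises when the hardware can only approximate the request. A small sketch with a hypothetical target_hz and tolerance:

#include <linux/clk.h>
#include <linux/errno.h>

static int my_set_pixel_clock(struct clk *clk, unsigned long target_hz)
{
	long rounded;

	/* Ask the framework what it would really give us for target_hz. */
	rounded = clk_round_rate(clk, target_hz);
	if (rounded <= 0)
		return rounded ? (int)rounded : -EINVAL;

	/* Accept up to 1% deviation before committing the change. */
	if (rounded < target_hz - target_hz / 100 ||
	    rounded > target_hz + target_hz / 100)
		return -ERANGE;

	return clk_set_rate(clk, rounded);
}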
1834 * @core: clk that is changing rate
1846 static int __clk_notify(struct clk_core *core, unsigned long msg, in __clk_notify() argument
1857 if (cn->clk->core == core) { in __clk_notify()
1871 * @core: first clk in the subtree
1878 static void __clk_recalc_accuracies(struct clk_core *core) in __clk_recalc_accuracies() argument
1885 if (core->parent) in __clk_recalc_accuracies()
1886 parent_accuracy = core->parent->accuracy; in __clk_recalc_accuracies()
1888 if (core->ops->recalc_accuracy) in __clk_recalc_accuracies()
1889 core->accuracy = core->ops->recalc_accuracy(core->hw, in __clk_recalc_accuracies()
1892 core->accuracy = parent_accuracy; in __clk_recalc_accuracies()
1894 hlist_for_each_entry(child, &core->children, child_node) in __clk_recalc_accuracies()
1898 static long clk_core_get_accuracy_recalc(struct clk_core *core) in clk_core_get_accuracy_recalc() argument
1900 if (core && (core->flags & CLK_GET_ACCURACY_NOCACHE)) in clk_core_get_accuracy_recalc()
1901 __clk_recalc_accuracies(core); in clk_core_get_accuracy_recalc()
1903 return clk_core_get_accuracy_no_lock(core); in clk_core_get_accuracy_recalc()
1923 accuracy = clk_core_get_accuracy_recalc(clk->core); in clk_get_accuracy()
1930 static unsigned long clk_recalc(struct clk_core *core, in clk_recalc() argument
1935 if (core->ops->recalc_rate && !clk_pm_runtime_get(core)) { in clk_recalc()
1936 rate = core->ops->recalc_rate(core->hw, parent_rate); in clk_recalc()
1937 clk_pm_runtime_put(core); in clk_recalc()
1944 * @core: first clk in the subtree
1955 static void __clk_recalc_rates(struct clk_core *core, bool update_req, in __clk_recalc_rates() argument
1964 old_rate = core->rate; in __clk_recalc_rates()
1966 if (core->parent) in __clk_recalc_rates()
1967 parent_rate = core->parent->rate; in __clk_recalc_rates()
1969 core->rate = clk_recalc(core, parent_rate); in __clk_recalc_rates()
1971 core->req_rate = core->rate; in __clk_recalc_rates()
1977 if (core->notifier_count && msg) in __clk_recalc_rates()
1978 __clk_notify(core, msg, old_rate, core->rate); in __clk_recalc_rates()
1980 hlist_for_each_entry(child, &core->children, child_node) in __clk_recalc_rates()
1984 static unsigned long clk_core_get_rate_recalc(struct clk_core *core) in clk_core_get_rate_recalc() argument
1986 if (core && (core->flags & CLK_GET_RATE_NOCACHE)) in clk_core_get_rate_recalc()
1987 __clk_recalc_rates(core, false, 0); in clk_core_get_rate_recalc()
1989 return clk_core_get_rate_nolock(core); in clk_core_get_rate_recalc()
2009 rate = clk_core_get_rate_recalc(clk->core); in clk_get_rate()
2016 static int clk_fetch_parent_index(struct clk_core *core, in clk_fetch_parent_index() argument
2024 for (i = 0; i < core->num_parents; i++) { in clk_fetch_parent_index()
2026 if (core->parents[i].core == parent) in clk_fetch_parent_index()
2030 if (core->parents[i].core) in clk_fetch_parent_index()
2033 /* Maybe core hasn't been cached but the hw is all we know? */ in clk_fetch_parent_index()
2034 if (core->parents[i].hw) { in clk_fetch_parent_index()
2035 if (core->parents[i].hw == parent->hw) in clk_fetch_parent_index()
2043 if (parent == clk_core_get(core, i)) in clk_fetch_parent_index()
2047 if (core->parents[i].name && in clk_fetch_parent_index()
2048 !strcmp(parent->name, core->parents[i].name)) in clk_fetch_parent_index()
2052 if (i == core->num_parents) in clk_fetch_parent_index()
2055 core->parents[i].core = parent; in clk_fetch_parent_index()
2073 return clk_fetch_parent_index(hw->core, parent->core); in clk_hw_get_parent_index()
2078 * Update the orphan status of @core and all its children.
2080 static void clk_core_update_orphan_status(struct clk_core *core, bool is_orphan) in clk_core_update_orphan_status() argument
2084 core->orphan = is_orphan; in clk_core_update_orphan_status()
2086 hlist_for_each_entry(child, &core->children, child_node) in clk_core_update_orphan_status()
2090 static void clk_reparent(struct clk_core *core, struct clk_core *new_parent) in clk_reparent() argument
2092 bool was_orphan = core->orphan; in clk_reparent()
2094 hlist_del(&core->child_node); in clk_reparent()
2100 if (new_parent->new_child == core) in clk_reparent()
2103 hlist_add_head(&core->child_node, &new_parent->children); in clk_reparent()
2106 clk_core_update_orphan_status(core, becomes_orphan); in clk_reparent()
2108 hlist_add_head(&core->child_node, &clk_orphan_list); in clk_reparent()
2110 clk_core_update_orphan_status(core, true); in clk_reparent()
2113 core->parent = new_parent; in clk_reparent()
2116 static struct clk_core *__clk_set_parent_before(struct clk_core *core, in __clk_set_parent_before() argument
2120 struct clk_core *old_parent = core->parent; in __clk_set_parent_before()
2143 if (core->flags & CLK_OPS_PARENT_ENABLE) { in __clk_set_parent_before()
2149 if (core->prepare_count) { in __clk_set_parent_before()
2151 clk_core_enable_lock(core); in __clk_set_parent_before()
2156 clk_reparent(core, parent); in __clk_set_parent_before()
2162 static void __clk_set_parent_after(struct clk_core *core, in __clk_set_parent_after() argument
2170 if (core->prepare_count) { in __clk_set_parent_after()
2171 clk_core_disable_lock(core); in __clk_set_parent_after()
2176 if (core->flags & CLK_OPS_PARENT_ENABLE) { in __clk_set_parent_after()
2182 static int __clk_set_parent(struct clk_core *core, struct clk_core *parent, in __clk_set_parent() argument
2189 old_parent = __clk_set_parent_before(core, parent); in __clk_set_parent()
2191 trace_clk_set_parent(core, parent); in __clk_set_parent()
2194 if (parent && core->ops->set_parent) in __clk_set_parent()
2195 ret = core->ops->set_parent(core->hw, p_index); in __clk_set_parent()
2197 trace_clk_set_parent_complete(core, parent); in __clk_set_parent()
2201 clk_reparent(core, old_parent); in __clk_set_parent()
2204 __clk_set_parent_after(core, old_parent, parent); in __clk_set_parent()
2209 __clk_set_parent_after(core, parent, old_parent); in __clk_set_parent()
2216 * @core: first clk in the subtree
2228 static int __clk_speculate_rates(struct clk_core *core, in __clk_speculate_rates() argument
2237 new_rate = clk_recalc(core, parent_rate); in __clk_speculate_rates()
2240 if (core->notifier_count) in __clk_speculate_rates()
2241 ret = __clk_notify(core, PRE_RATE_CHANGE, core->rate, new_rate); in __clk_speculate_rates()
2245 __func__, core->name, ret); in __clk_speculate_rates()
2249 hlist_for_each_entry(child, &core->children, child_node) { in __clk_speculate_rates()
2259 static void clk_calc_subtree(struct clk_core *core, unsigned long new_rate, in clk_calc_subtree() argument
2264 core->new_rate = new_rate; in clk_calc_subtree()
2265 core->new_parent = new_parent; in clk_calc_subtree()
2266 core->new_parent_index = p_index; in clk_calc_subtree()
2268 core->new_child = NULL; in clk_calc_subtree()
2269 if (new_parent && new_parent != core->parent) in clk_calc_subtree()
2270 new_parent->new_child = core; in clk_calc_subtree()
2272 hlist_for_each_entry(child, &core->children, child_node) { in clk_calc_subtree()
2282 static struct clk_core *clk_calc_new_rates(struct clk_core *core, in clk_calc_new_rates() argument
2285 struct clk_core *top = core; in clk_calc_new_rates()
2295 if (IS_ERR_OR_NULL(core)) in clk_calc_new_rates()
2299 parent = old_parent = core->parent; in clk_calc_new_rates()
2303 clk_core_get_boundaries(core, &min_rate, &max_rate); in clk_calc_new_rates()
2306 if (clk_core_can_round(core)) { in clk_calc_new_rates()
2309 clk_core_init_rate_req(core, &req, rate); in clk_calc_new_rates()
2313 ret = clk_core_determine_round_nolock(core, &req); in clk_calc_new_rates()
2321 parent = req.best_parent_hw ? req.best_parent_hw->core : NULL; in clk_calc_new_rates()
2325 } else if (!parent || !(core->flags & CLK_SET_RATE_PARENT)) { in clk_calc_new_rates()
2327 core->new_rate = core->rate; in clk_calc_new_rates()
2338 (core->flags & CLK_SET_PARENT_GATE) && core->prepare_count) { in clk_calc_new_rates()
2340 __func__, core->name); in clk_calc_new_rates()
2345 if (parent && core->num_parents > 1) { in clk_calc_new_rates()
2346 p_index = clk_fetch_parent_index(core, parent); in clk_calc_new_rates()
2349 __func__, parent->name, core->name); in clk_calc_new_rates()
2354 if ((core->flags & CLK_SET_RATE_PARENT) && parent && in clk_calc_new_rates()
2359 clk_calc_subtree(core, new_rate, parent, p_index); in clk_calc_new_rates()
2369 static struct clk_core *clk_propagate_rate_change(struct clk_core *core, in clk_propagate_rate_change() argument
2375 if (core->rate == core->new_rate) in clk_propagate_rate_change()
2378 if (core->notifier_count) { in clk_propagate_rate_change()
2379 ret = __clk_notify(core, event, core->rate, core->new_rate); in clk_propagate_rate_change()
2381 fail_clk = core; in clk_propagate_rate_change()
2384 hlist_for_each_entry(child, &core->children, child_node) { in clk_propagate_rate_change()
2386 if (child->new_parent && child->new_parent != core) in clk_propagate_rate_change()
2393 /* handle the new child who might not be in core->children yet */ in clk_propagate_rate_change()
2394 if (core->new_child) { in clk_propagate_rate_change()
2395 tmp_clk = clk_propagate_rate_change(core->new_child, event); in clk_propagate_rate_change()
2407 static void clk_change_rate(struct clk_core *core) in clk_change_rate() argument
2417 old_rate = core->rate; in clk_change_rate()
2419 if (core->new_parent) { in clk_change_rate()
2420 parent = core->new_parent; in clk_change_rate()
2421 best_parent_rate = core->new_parent->rate; in clk_change_rate()
2422 } else if (core->parent) { in clk_change_rate()
2423 parent = core->parent; in clk_change_rate()
2424 best_parent_rate = core->parent->rate; in clk_change_rate()
2427 if (clk_pm_runtime_get(core)) in clk_change_rate()
2430 if (core->flags & CLK_SET_RATE_UNGATE) { in clk_change_rate()
2431 clk_core_prepare(core); in clk_change_rate()
2432 clk_core_enable_lock(core); in clk_change_rate()
2435 if (core->new_parent && core->new_parent != core->parent) { in clk_change_rate()
2436 old_parent = __clk_set_parent_before(core, core->new_parent); in clk_change_rate()
2437 trace_clk_set_parent(core, core->new_parent); in clk_change_rate()
2439 if (core->ops->set_rate_and_parent) { in clk_change_rate()
2441 core->ops->set_rate_and_parent(core->hw, core->new_rate, in clk_change_rate()
2443 core->new_parent_index); in clk_change_rate()
2444 } else if (core->ops->set_parent) { in clk_change_rate()
2445 core->ops->set_parent(core->hw, core->new_parent_index); in clk_change_rate()
2448 trace_clk_set_parent_complete(core, core->new_parent); in clk_change_rate()
2449 __clk_set_parent_after(core, core->new_parent, old_parent); in clk_change_rate()
2452 if (core->flags & CLK_OPS_PARENT_ENABLE) in clk_change_rate()
2455 trace_clk_set_rate(core, core->new_rate); in clk_change_rate()
2457 if (!skip_set_rate && core->ops->set_rate) in clk_change_rate()
2458 core->ops->set_rate(core->hw, core->new_rate, best_parent_rate); in clk_change_rate()
2460 trace_clk_set_rate_complete(core, core->new_rate); in clk_change_rate()
2462 core->rate = clk_recalc(core, best_parent_rate); in clk_change_rate()
2464 if (core->flags & CLK_SET_RATE_UNGATE) { in clk_change_rate()
2465 clk_core_disable_lock(core); in clk_change_rate()
2466 clk_core_unprepare(core); in clk_change_rate()
2469 if (core->flags & CLK_OPS_PARENT_ENABLE) in clk_change_rate()
2472 if (core->notifier_count && old_rate != core->rate) in clk_change_rate()
2473 __clk_notify(core, POST_RATE_CHANGE, old_rate, core->rate); in clk_change_rate()
2475 if (core->flags & CLK_RECALC_NEW_RATES) in clk_change_rate()
2476 (void)clk_calc_new_rates(core, core->new_rate); in clk_change_rate()
2482 hlist_for_each_entry_safe(child, tmp, &core->children, child_node) { in clk_change_rate()
2484 if (child->new_parent && child->new_parent != core) in clk_change_rate()
2489 /* handle the new child who might not be in core->children yet */ in clk_change_rate()
2490 if (core->new_child) in clk_change_rate()
2491 clk_change_rate(core->new_child); in clk_change_rate()
2493 clk_pm_runtime_put(core); in clk_change_rate()
2496 static unsigned long clk_core_req_round_rate_nolock(struct clk_core *core, in clk_core_req_round_rate_nolock() argument
2504 if (!core) in clk_core_req_round_rate_nolock()
2508 cnt = clk_core_rate_nuke_protect(core); in clk_core_req_round_rate_nolock()
2512 clk_core_init_rate_req(core, &req, req_rate); in clk_core_req_round_rate_nolock()
2516 ret = clk_core_round_rate_nolock(core, &req); in clk_core_req_round_rate_nolock()
2521 clk_core_rate_restore_protect(core, cnt); in clk_core_req_round_rate_nolock()
2526 static int clk_core_set_rate_nolock(struct clk_core *core, in clk_core_set_rate_nolock() argument
2533 if (!core) in clk_core_set_rate_nolock()
2536 rate = clk_core_req_round_rate_nolock(core, req_rate); in clk_core_set_rate_nolock()
2539 if (rate == clk_core_get_rate_nolock(core)) in clk_core_set_rate_nolock()
2543 if (clk_core_rate_is_protected(core)) in clk_core_set_rate_nolock()
2547 top = clk_calc_new_rates(core, req_rate); in clk_core_set_rate_nolock()
2551 ret = clk_pm_runtime_get(core); in clk_core_set_rate_nolock()
2568 core->req_rate = req_rate; in clk_core_set_rate_nolock()
2570 clk_pm_runtime_put(core); in clk_core_set_rate_nolock()
2607 clk_core_rate_unprotect(clk->core); in clk_set_rate()
2609 ret = clk_core_set_rate_nolock(clk->core, rate); in clk_set_rate()
2612 clk_core_rate_protect(clk->core); in clk_set_rate()
2655 ret = clk_core_set_rate_nolock(clk->core, rate); in clk_set_rate_exclusive()
2657 clk_core_rate_protect(clk->core); in clk_set_rate_exclusive()
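Several matches above (clk_core_rate_protect/unprotect, clk_rate_exclusive_get/put, clk_set_rate_exclusive) implement rate protection: while a consumer holds an exclusive reference, other consumers cannot change the rate or reparent the clock. A minimal sketch of claiming a clock at an exact rate for a hypothetical audio link:

#include <linux/clk.h>

/*
 * Lock the clock at rate_hz; other consumers can no longer re-rate or
 * reparent it until my_release_mclk() drops the protection.
 */
static int my_claim_mclk(struct clk *clk, unsigned long rate_hz)
{
	/* Sets the rate and takes the exclusive (protect) reference in one call. */
	return clk_set_rate_exclusive(clk, rate_hz);
}

static void my_release_mclk(struct clk *clk)
{
	/* Drops the protect count taken by clk_set_rate_exclusive(). */
	clk_rate_exclusive_put(clk);
}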
2679 trace_clk_set_rate_range(clk->core, min, max); in clk_set_rate_range_nolock()
2683 __func__, clk->core->name, clk->dev_id, clk->con_id, in clk_set_rate_range_nolock()
2689 clk_core_rate_unprotect(clk->core); in clk_set_rate_range_nolock()
2697 if (!clk_core_check_boundaries(clk->core, min, max)) { in clk_set_rate_range_nolock()
2702 rate = clk->core->req_rate; in clk_set_rate_range_nolock()
2703 if (clk->core->flags & CLK_GET_RATE_NOCACHE) in clk_set_rate_range_nolock()
2704 rate = clk_core_get_rate_recalc(clk->core); in clk_set_rate_range_nolock()
2724 ret = clk_core_set_rate_nolock(clk->core, rate); in clk_set_rate_range_nolock()
2733 clk_core_rate_protect(clk->core); in clk_set_rate_range_nolock()
2775 trace_clk_set_min_rate(clk->core, rate); in clk_set_min_rate()
2793 trace_clk_set_max_rate(clk->core, rate); in clk_set_max_rate()
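The clk_set_rate_range/min/max entries above record per-consumer boundaries that clk_core_get_boundaries() later intersects across all consumers of the same clk_core. A consumer-side sketch constraining a hypothetical bus clock:

#include <linux/clk.h>

static int my_constrain_bus_clk(struct clk *clk)
{
	/*
	 * Every consumer's range is aggregated; the effective range is the
	 * intersection, so this only narrows what other users already allow.
	 */
	return clk_set_rate_range(clk, 50 * 1000 * 1000, 200 * 1000 * 1000);
}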
2814 parent = !clk->core->parent ? NULL : clk->core->parent->hw->clk; in clk_get_parent()
2821 static struct clk_core *__clk_init_parent(struct clk_core *core) in __clk_init_parent() argument
2825 if (core->num_parents > 1 && core->ops->get_parent) in __clk_init_parent()
2826 index = core->ops->get_parent(core->hw); in __clk_init_parent()
2828 return clk_core_get_parent_by_index(core, index); in __clk_init_parent()
2831 static void clk_core_reparent(struct clk_core *core, in clk_core_reparent() argument
2834 clk_reparent(core, new_parent); in clk_core_reparent()
2835 __clk_recalc_accuracies(core); in clk_core_reparent()
2836 __clk_recalc_rates(core, true, POST_RATE_CHANGE); in clk_core_reparent()
2844 clk_core_reparent(hw->core, !new_parent ? NULL : new_parent->core); in clk_hw_reparent()
2863 return clk_core_has_parent(clk->core, parent->core); in clk_has_parent()
2867 static int clk_core_set_parent_nolock(struct clk_core *core, in clk_core_set_parent_nolock() argument
2876 if (!core) in clk_core_set_parent_nolock()
2879 if (core->parent == parent) in clk_core_set_parent_nolock()
2883 if (core->num_parents > 1 && !core->ops->set_parent) in clk_core_set_parent_nolock()
2887 if ((core->flags & CLK_SET_PARENT_GATE) && core->prepare_count) in clk_core_set_parent_nolock()
2890 if (clk_core_rate_is_protected(core)) in clk_core_set_parent_nolock()
2895 p_index = clk_fetch_parent_index(core, parent); in clk_core_set_parent_nolock()
2898 __func__, parent->name, core->name); in clk_core_set_parent_nolock()
2904 ret = clk_pm_runtime_get(core); in clk_core_set_parent_nolock()
2909 ret = __clk_speculate_rates(core, p_rate); in clk_core_set_parent_nolock()
2916 ret = __clk_set_parent(core, parent, p_index); in clk_core_set_parent_nolock()
2920 __clk_recalc_rates(core, true, ABORT_RATE_CHANGE); in clk_core_set_parent_nolock()
2922 __clk_recalc_rates(core, true, POST_RATE_CHANGE); in clk_core_set_parent_nolock()
2923 __clk_recalc_accuracies(core); in clk_core_set_parent_nolock()
2927 clk_pm_runtime_put(core); in clk_core_set_parent_nolock()
2934 return clk_core_set_parent_nolock(hw->core, parent->core); in clk_hw_set_parent()
2965 clk_core_rate_unprotect(clk->core); in clk_set_parent()
2967 ret = clk_core_set_parent_nolock(clk->core, in clk_set_parent()
2968 parent ? parent->core : NULL); in clk_set_parent()
2971 clk_core_rate_protect(clk->core); in clk_set_parent()
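clk_set_parent(), listed above, reparents a mux after clk_core_set_parent_nolock() validates the parent and the CLK_SET_PARENT_GATE and rate-protection rules. A consumer sketch that switches a hypothetical mux only if the requested parent is actually reachable:

#include <linux/clk.h>
#include <linux/errno.h>

static int my_switch_mux(struct clk *mux, struct clk *new_parent)
{
	/*
	 * clk_has_parent() walks the same parent table that
	 * clk_fetch_parent_index() uses in the listing above.
	 */
	if (!clk_has_parent(mux, new_parent))
		return -EINVAL;

	return clk_set_parent(mux, new_parent);
}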
2979 static int clk_core_set_phase_nolock(struct clk_core *core, int degrees) in clk_core_set_phase_nolock() argument
2985 if (!core) in clk_core_set_phase_nolock()
2988 if (clk_core_rate_is_protected(core)) in clk_core_set_phase_nolock()
2991 trace_clk_set_phase(core, degrees); in clk_core_set_phase_nolock()
2993 if (core->ops->set_phase) { in clk_core_set_phase_nolock()
2994 ret = core->ops->set_phase(core->hw, degrees); in clk_core_set_phase_nolock()
2996 core->phase = degrees; in clk_core_set_phase_nolock()
2999 trace_clk_set_phase_complete(core, degrees); in clk_core_set_phase_nolock()
3039 clk_core_rate_unprotect(clk->core); in clk_set_phase()
3041 ret = clk_core_set_phase_nolock(clk->core, degrees); in clk_set_phase()
3044 clk_core_rate_protect(clk->core); in clk_set_phase()
3052 static int clk_core_get_phase(struct clk_core *core) in clk_core_get_phase() argument
3057 if (!core->ops->get_phase) in clk_core_get_phase()
3061 ret = core->ops->get_phase(core->hw); in clk_core_get_phase()
3063 core->phase = ret; in clk_core_get_phase()
3083 ret = clk_core_get_phase(clk->core); in clk_get_phase()
3090 static void clk_core_reset_duty_cycle_nolock(struct clk_core *core) in clk_core_reset_duty_cycle_nolock() argument
3093 core->duty.num = 1; in clk_core_reset_duty_cycle_nolock()
3094 core->duty.den = 2; in clk_core_reset_duty_cycle_nolock()
3097 static int clk_core_update_duty_cycle_parent_nolock(struct clk_core *core);
3099 static int clk_core_update_duty_cycle_nolock(struct clk_core *core) in clk_core_update_duty_cycle_nolock() argument
3101 struct clk_duty *duty = &core->duty; in clk_core_update_duty_cycle_nolock()
3104 if (!core->ops->get_duty_cycle) in clk_core_update_duty_cycle_nolock()
3105 return clk_core_update_duty_cycle_parent_nolock(core); in clk_core_update_duty_cycle_nolock()
3107 ret = core->ops->get_duty_cycle(core->hw, duty); in clk_core_update_duty_cycle_nolock()
3120 clk_core_reset_duty_cycle_nolock(core); in clk_core_update_duty_cycle_nolock()
3124 static int clk_core_update_duty_cycle_parent_nolock(struct clk_core *core) in clk_core_update_duty_cycle_parent_nolock() argument
3128 if (core->parent && in clk_core_update_duty_cycle_parent_nolock()
3129 core->flags & CLK_DUTY_CYCLE_PARENT) { in clk_core_update_duty_cycle_parent_nolock()
3130 ret = clk_core_update_duty_cycle_nolock(core->parent); in clk_core_update_duty_cycle_parent_nolock()
3131 memcpy(&core->duty, &core->parent->duty, sizeof(core->duty)); in clk_core_update_duty_cycle_parent_nolock()
3133 clk_core_reset_duty_cycle_nolock(core); in clk_core_update_duty_cycle_parent_nolock()
3139 static int clk_core_set_duty_cycle_parent_nolock(struct clk_core *core,
3142 static int clk_core_set_duty_cycle_nolock(struct clk_core *core, in clk_core_set_duty_cycle_nolock() argument
3149 if (clk_core_rate_is_protected(core)) in clk_core_set_duty_cycle_nolock()
3152 trace_clk_set_duty_cycle(core, duty); in clk_core_set_duty_cycle_nolock()
3154 if (!core->ops->set_duty_cycle) in clk_core_set_duty_cycle_nolock()
3155 return clk_core_set_duty_cycle_parent_nolock(core, duty); in clk_core_set_duty_cycle_nolock()
3157 ret = core->ops->set_duty_cycle(core->hw, duty); in clk_core_set_duty_cycle_nolock()
3159 memcpy(&core->duty, duty, sizeof(*duty)); in clk_core_set_duty_cycle_nolock()
3161 trace_clk_set_duty_cycle_complete(core, duty); in clk_core_set_duty_cycle_nolock()
3166 static int clk_core_set_duty_cycle_parent_nolock(struct clk_core *core, in clk_core_set_duty_cycle_parent_nolock() argument
3171 if (core->parent && in clk_core_set_duty_cycle_parent_nolock()
3172 core->flags & (CLK_DUTY_CYCLE_PARENT | CLK_SET_RATE_PARENT)) { in clk_core_set_duty_cycle_parent_nolock()
3173 ret = clk_core_set_duty_cycle_nolock(core->parent, duty); in clk_core_set_duty_cycle_parent_nolock()
3174 memcpy(&core->duty, &core->parent->duty, sizeof(core->duty)); in clk_core_set_duty_cycle_parent_nolock()
3209 clk_core_rate_unprotect(clk->core); in clk_set_duty_cycle()
3211 ret = clk_core_set_duty_cycle_nolock(clk->core, &duty); in clk_set_duty_cycle()
3214 clk_core_rate_protect(clk->core); in clk_set_duty_cycle()
3222 static int clk_core_get_scaled_duty_cycle(struct clk_core *core, in clk_core_get_scaled_duty_cycle() argument
3225 struct clk_duty *duty = &core->duty; in clk_core_get_scaled_duty_cycle()
3230 ret = clk_core_update_duty_cycle_nolock(core); in clk_core_get_scaled_duty_cycle()
3252 return clk_core_get_scaled_duty_cycle(clk->core, scale); in clk_get_scaled_duty_cycle()
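The duty-cycle matches above back clk_set_duty_cycle() and clk_get_scaled_duty_cycle() on the consumer side; the ratio is stored as a numerator/denominator pair in core->duty. A small sketch requesting a one-third duty cycle and reading it back as a percentage:

#include <linux/clk.h>

static int my_set_one_third_duty(struct clk *clk)
{
	int ret;

	/*
	 * Request num/den = 1/3; if the clk has no .set_duty_cycle op the
	 * request may be delegated to the parent (see
	 * clk_core_set_duty_cycle_parent_nolock() above).
	 */
	ret = clk_set_duty_cycle(clk, 1, 3);
	if (ret)
		return ret;

	/* Read back, scaled to 0..100 (per cent). */
	return clk_get_scaled_duty_cycle(clk, 100);
}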
3273 /* true if clk->core pointers match. Avoid dereferencing garbage */ in clk_is_match()
3275 if (p->core == q->core) in clk_is_match()
3454 struct clk_core *core = data; in clk_rate_set() local
3458 ret = clk_core_set_rate_nolock(core, val); in clk_rate_set()
3468 struct clk_core *core = data; in clk_phase_set() local
3473 ret = clk_core_set_phase_nolock(core, degrees); in clk_phase_set()
3483 struct clk_core *core = data; in clk_prepare_enable_set() local
3487 ret = clk_prepare_enable(core->hw->clk); in clk_prepare_enable_set()
3489 clk_disable_unprepare(core->hw->clk); in clk_prepare_enable_set()
3496 struct clk_core *core = data; in clk_prepare_enable_get() local
3498 *val = core->enable_count && core->prepare_count; in clk_prepare_enable_get()
3515 struct clk_core *core = data; in clk_rate_get() local
3518 *val = clk_core_get_rate_recalc(core); in clk_rate_get()
3528 struct clk_core *core = data; in clk_phase_get() local
3530 *val = core->phase; in clk_phase_get()
3558 struct clk_core *core = s->private; in clk_flags_show() local
3559 unsigned long flags = core->flags; in clk_flags_show()
3577 static void possible_parent_show(struct seq_file *s, struct clk_core *core, in possible_parent_show() argument
3595 parent = clk_core_get_parent_by_index(core, i); in possible_parent_show()
3598 } else if (core->parents[i].name) { in possible_parent_show()
3599 seq_puts(s, core->parents[i].name); in possible_parent_show()
3600 } else if (core->parents[i].fw_name) { in possible_parent_show()
3601 seq_printf(s, "<%s>(fw)", core->parents[i].fw_name); in possible_parent_show()
3603 if (core->parents[i].index >= 0) in possible_parent_show()
3604 name = of_clk_get_parent_name(core->of_node, core->parents[i].index); in possible_parent_show()
3616 struct clk_core *core = s->private; in possible_parents_show() local
3619 for (i = 0; i < core->num_parents - 1; i++) in possible_parents_show()
3620 possible_parent_show(s, core, i, ' '); in possible_parents_show()
3622 possible_parent_show(s, core, i, '\n'); in possible_parents_show()
3630 struct clk_core *core = s->private; in current_parent_show() local
3632 if (core->parent) in current_parent_show()
3633 seq_printf(s, "%s\n", core->parent->name); in current_parent_show()
3644 struct clk_core *core = s->private; in current_parent_write() local
3653 parent = clk_core_get_parent_by_index(core, idx); in current_parent_write()
3658 err = clk_core_set_parent_nolock(core, parent); in current_parent_write()
3677 struct clk_core *core = s->private; in clk_duty_cycle_show() local
3678 struct clk_duty *duty = &core->duty; in clk_duty_cycle_show()
3688 struct clk_core *core = s->private; in clk_min_rate_show() local
3692 clk_core_get_boundaries(core, &min_rate, &max_rate); in clk_min_rate_show()
3702 struct clk_core *core = s->private; in clk_max_rate_show() local
3706 clk_core_get_boundaries(core, &min_rate, &max_rate); in clk_max_rate_show()
3714 static void clk_debug_create_one(struct clk_core *core, struct dentry *pdentry) in clk_debug_create_one() argument
3718 if (!core || !pdentry) in clk_debug_create_one()
3721 root = debugfs_create_dir(core->name, pdentry); in clk_debug_create_one()
3722 core->dentry = root; in clk_debug_create_one()
3724 debugfs_create_file("clk_rate", clk_rate_mode, root, core, in clk_debug_create_one()
3726 debugfs_create_file("clk_min_rate", 0444, root, core, &clk_min_rate_fops); in clk_debug_create_one()
3727 debugfs_create_file("clk_max_rate", 0444, root, core, &clk_max_rate_fops); in clk_debug_create_one()
3728 debugfs_create_ulong("clk_accuracy", 0444, root, &core->accuracy); in clk_debug_create_one()
3729 debugfs_create_file("clk_phase", clk_phase_mode, root, core, in clk_debug_create_one()
3731 debugfs_create_file("clk_flags", 0444, root, core, &clk_flags_fops); in clk_debug_create_one()
3732 debugfs_create_u32("clk_prepare_count", 0444, root, &core->prepare_count); in clk_debug_create_one()
3733 debugfs_create_u32("clk_enable_count", 0444, root, &core->enable_count); in clk_debug_create_one()
3734 debugfs_create_u32("clk_protect_count", 0444, root, &core->protect_count); in clk_debug_create_one()
3735 debugfs_create_u32("clk_notifier_count", 0444, root, &core->notifier_count); in clk_debug_create_one()
3736 debugfs_create_file("clk_duty_cycle", 0444, root, core, in clk_debug_create_one()
3739 debugfs_create_file("clk_prepare_enable", 0644, root, core, in clk_debug_create_one()
3742 if (core->num_parents > 1) in clk_debug_create_one()
3743 debugfs_create_file("clk_parent", 0644, root, core, in clk_debug_create_one()
3747 if (core->num_parents > 0) in clk_debug_create_one()
3748 debugfs_create_file("clk_parent", 0444, root, core, in clk_debug_create_one()
3751 if (core->num_parents > 1) in clk_debug_create_one()
3752 debugfs_create_file("clk_possible_parents", 0444, root, core, in clk_debug_create_one()
3755 if (core->ops->debug_init) in clk_debug_create_one()
3756 core->ops->debug_init(core->hw, core->dentry); in clk_debug_create_one()
3761 * @core: the clk being added to the debugfs clk directory
3767 static void clk_debug_register(struct clk_core *core) in clk_debug_register() argument
3770 hlist_add_head(&core->debug_node, &clk_debug_list); in clk_debug_register()
3772 clk_debug_create_one(core, rootdir); in clk_debug_register()
3778 * @core: the clk being removed from the debugfs clk directory
3784 static void clk_debug_unregister(struct clk_core *core) in clk_debug_unregister() argument
3787 hlist_del_init(&core->debug_node); in clk_debug_unregister()
3788 debugfs_remove_recursive(core->dentry); in clk_debug_unregister()
3789 core->dentry = NULL; in clk_debug_unregister()
3804 struct clk_core *core; in clk_debug_init() local
3836 hlist_for_each_entry(core, &clk_debug_list, debug_node) in clk_debug_init()
3837 clk_debug_create_one(core, rootdir); in clk_debug_init()
3846 static inline void clk_debug_register(struct clk_core *core) { } in clk_debug_register() argument
3847 static inline void clk_debug_unregister(struct clk_core *core) in clk_debug_unregister() argument
3895 * @core: clk_core being initialized
3900 static int __clk_core_init(struct clk_core *core) in __clk_core_init() argument
3910 * Set hw->core after grabbing the prepare_lock to synchronize with in __clk_core_init()
3911 * callers of clk_core_fill_parent_index() where we treat hw->core in __clk_core_init()
3915 core->hw->core = core; in __clk_core_init()
3917 ret = clk_pm_runtime_get(core); in __clk_core_init()
3922 if (clk_core_lookup(core->name)) { in __clk_core_init()
3924 __func__, core->name); in __clk_core_init()
3930 if (core->ops->set_rate && in __clk_core_init()
3931 !((core->ops->round_rate || core->ops->determine_rate) && in __clk_core_init()
3932 core->ops->recalc_rate)) { in __clk_core_init()
3934 __func__, core->name); in __clk_core_init()
3939 if (core->ops->set_parent && !core->ops->get_parent) { in __clk_core_init()
3941 __func__, core->name); in __clk_core_init()
3946 if (core->ops->set_parent && !core->ops->determine_rate) { in __clk_core_init()
3948 __func__, core->name); in __clk_core_init()
3953 if (core->num_parents > 1 && !core->ops->get_parent) { in __clk_core_init()
3955 __func__, core->name); in __clk_core_init()
3960 if (core->ops->set_rate_and_parent && in __clk_core_init()
3961 !(core->ops->set_parent && core->ops->set_rate)) { in __clk_core_init()
3963 __func__, core->name); in __clk_core_init()
3982 if (core->ops->init) { in __clk_core_init()
3983 ret = core->ops->init(core->hw); in __clk_core_init()
3988 parent = core->parent = __clk_init_parent(core); in __clk_core_init()
3991 * Populate core->parent if parent has already been clk_core_init'd. If in __clk_core_init()
4001 hlist_add_head(&core->child_node, &parent->children); in __clk_core_init()
4002 core->orphan = parent->orphan; in __clk_core_init()
4003 } else if (!core->num_parents) { in __clk_core_init()
4004 hlist_add_head(&core->child_node, &clk_root_list); in __clk_core_init()
4005 core->orphan = false; in __clk_core_init()
4007 hlist_add_head(&core->child_node, &clk_orphan_list); in __clk_core_init()
4008 core->orphan = true; in __clk_core_init()
4018 if (core->ops->recalc_accuracy) in __clk_core_init()
4019 core->accuracy = core->ops->recalc_accuracy(core->hw, in __clk_core_init()
4022 core->accuracy = parent->accuracy; in __clk_core_init()
4024 core->accuracy = 0; in __clk_core_init()
4031 phase = clk_core_get_phase(core); in __clk_core_init()
4035 core->name); in __clk_core_init()
4042 clk_core_update_duty_cycle_nolock(core); in __clk_core_init()
4050 if (core->ops->recalc_rate) in __clk_core_init()
4051 rate = core->ops->recalc_rate(core->hw, in __clk_core_init()
4057 core->rate = core->req_rate = rate; in __clk_core_init()
4064 if (core->flags & CLK_IS_CRITICAL) { in __clk_core_init()
4065 ret = clk_core_prepare(core); in __clk_core_init()
4068 __func__, core->name); in __clk_core_init()
4072 ret = clk_core_enable_lock(core); in __clk_core_init()
4075 __func__, core->name); in __clk_core_init()
4076 clk_core_unprepare(core); in __clk_core_init()
4083 clk_pm_runtime_put(core); in __clk_core_init()
4086 hlist_del_init(&core->child_node); in __clk_core_init()
4087 core->hw->core = NULL; in __clk_core_init()
4093 clk_debug_register(core); in __clk_core_init()
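__clk_core_init(), whose sanity checks are listed above, rejects a registration whose clk_ops are inconsistent (for example .set_rate without .round_rate/.determine_rate plus .recalc_rate, or .set_parent without .get_parent). A minimal provider sketch that satisfies those checks for a hypothetical gate with a fixed divide-by-two; names and register layout are invented for illustration:

#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/bits.h>
#include <linux/platform_device.h>

struct my_gate {
	struct clk_hw hw;
	void __iomem *enable_reg;	/* bit 0 gates the clock (hypothetical) */
};

#define to_my_gate(_hw) container_of(_hw, struct my_gate, hw)

static int my_gate_enable(struct clk_hw *hw)
{
	struct my_gate *g = to_my_gate(hw);

	writel(readl(g->enable_reg) | BIT(0), g->enable_reg);
	return 0;
}

static void my_gate_disable(struct clk_hw *hw)
{
	struct my_gate *g = to_my_gate(hw);

	writel(readl(g->enable_reg) & ~BIT(0), g->enable_reg);
}

static unsigned long my_gate_recalc_rate(struct clk_hw *hw,
					 unsigned long parent_rate)
{
	return parent_rate / 2;		/* fixed divide-by-two for the example */
}

static const struct clk_ops my_gate_ops = {
	.enable		= my_gate_enable,
	.disable	= my_gate_disable,
	.recalc_rate	= my_gate_recalc_rate,
	/* no .set_rate, so no .round_rate/.determine_rate is required */
};

static int my_gate_register(struct platform_device *pdev, struct my_gate *g)
{
	const struct clk_parent_data parent = { .fw_name = "ref" };
	struct clk_init_data init = {
		.name		= "my_gate",
		.ops		= &my_gate_ops,
		.parent_data	= &parent,
		.num_parents	= 1,
	};

	g->hw.init = &init;
	/* Ends up in __clk_core_init() via __clk_register(). */
	return devm_clk_hw_register(&pdev->dev, &g->hw);
}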
4100 * @core: clk to add consumer to
4103 static void clk_core_link_consumer(struct clk_core *core, struct clk *clk) in clk_core_link_consumer() argument
4106 hlist_add_head(&clk->clks_node, &core->clks); in clk_core_link_consumer()
4122 * @core: clk to allocate a consumer for
4128 static struct clk *alloc_clk(struct clk_core *core, const char *dev_id, in alloc_clk() argument
4137 clk->core = core; in alloc_clk()
4174 struct clk_core *core; in clk_hw_create_clk() local
4180 core = hw->core; in clk_hw_create_clk()
4181 clk = alloc_clk(core, dev_id, con_id); in clk_hw_create_clk()
4186 if (!try_module_get(core->owner)) { in clk_hw_create_clk()
4191 kref_get(&core->ref); in clk_hw_create_clk()
4192 clk_core_link_consumer(core, clk); in clk_hw_create_clk()
4209 struct device *dev = hw->core->dev; in clk_hw_get_clk()
4233 static int clk_core_populate_parent_map(struct clk_core *core, in clk_core_populate_parent_map() argument
4251 core->parents = parents; in clk_core_populate_parent_map()
4262 __func__, core->name); in clk_core_populate_parent_map()
4295 static void clk_core_free_parent_map(struct clk_core *core) in clk_core_free_parent_map() argument
4297 int i = core->num_parents; in clk_core_free_parent_map()
4299 if (!core->num_parents) in clk_core_free_parent_map()
4303 kfree_const(core->parents[i].name); in clk_core_free_parent_map()
4304 kfree_const(core->parents[i].fw_name); in clk_core_free_parent_map()
4307 kfree(core->parents); in clk_core_free_parent_map()
4313 struct clk_core *core = container_of(ref, struct clk_core, ref); in __clk_release() local
4315 if (core->rpm_enabled) { in __clk_release()
4317 hlist_del(&core->rpm_node); in __clk_release()
4321 clk_core_free_parent_map(core); in __clk_release()
4322 kfree_const(core->name); in __clk_release()
4323 kfree(core); in __clk_release()
4330 struct clk_core *core; in __clk_register() local
4336 * we catch use of hw->init early on in the core. in __clk_register()
4340 core = kzalloc(sizeof(*core), GFP_KERNEL); in __clk_register()
4341 if (!core) { in __clk_register()
4346 kref_init(&core->ref); in __clk_register()
4348 core->name = kstrdup_const(init->name, GFP_KERNEL); in __clk_register()
4349 if (!core->name) { in __clk_register()
4358 core->ops = init->ops; in __clk_register()
4360 core->dev = dev; in __clk_register()
4361 clk_pm_runtime_init(core); in __clk_register()
4362 core->of_node = np; in __clk_register()
4364 core->owner = dev->driver->owner; in __clk_register()
4365 core->hw = hw; in __clk_register()
4366 core->flags = init->flags; in __clk_register()
4367 core->num_parents = init->num_parents; in __clk_register()
4368 core->min_rate = 0; in __clk_register()
4369 core->max_rate = ULONG_MAX; in __clk_register()
4371 ret = clk_core_populate_parent_map(core, init); in __clk_register()
4375 INIT_HLIST_HEAD(&core->clks); in __clk_register()
4381 hw->clk = alloc_clk(core, NULL, NULL); in __clk_register()
4387 clk_core_link_consumer(core, hw->clk); in __clk_register()
4389 ret = __clk_core_init(core); in __clk_register()
4404 kref_put(&core->ref, __clk_release); in __clk_register()
4533 if (root->parents[i].core == target) in clk_core_evict_parent_cache_subtree()
4534 root->parents[i].core = NULL; in clk_core_evict_parent_cache_subtree()
4541 static void clk_core_evict_parent_cache(struct clk_core *core) in clk_core_evict_parent_cache() argument
4550 clk_core_evict_parent_cache_subtree(root, core); in clk_core_evict_parent_cache()
4566 clk_debug_unregister(clk->core); in clk_unregister()
4570 ops = clk->core->ops; in clk_unregister()
4573 clk->core->name); in clk_unregister()
4582 clk->core->ops = &clk_nodrv_ops; in clk_unregister()
4586 ops->terminate(clk->core->hw); in clk_unregister()
4588 if (!hlist_empty(&clk->core->children)) { in clk_unregister()
4593 hlist_for_each_entry_safe(child, t, &clk->core->children, in clk_unregister()
4598 clk_core_evict_parent_cache(clk->core); in clk_unregister()
4600 hlist_del_init(&clk->core->child_node); in clk_unregister()
4602 if (clk->core->prepare_count) in clk_unregister()
4604 __func__, clk->core->name); in clk_unregister()
4606 if (clk->core->protect_count) in clk_unregister()
4608 __func__, clk->core->name); in clk_unregister()
4611 kref_put(&clk->core->ref, __clk_release); in clk_unregister()
4722 WARN_ON_ONCE(dev != hw->core->dev); in devm_clk_hw_get_clk()
4760 clk->core->protect_count -= (clk->exclusive_count - 1); in __clk_put()
4761 clk_core_rate_unprotect(clk->core); in __clk_put()
4773 owner = clk->core->owner; in __clk_put()
4774 kref_put(&clk->core->ref, __clk_release); in __clk_put()
4829 clk->core->notifier_count++; in clk_notifier_register()
4863 clk->core->notifier_count--; in clk_notifier_unregister()
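The final matches touch the notifier counters maintained by clk_notifier_register()/clk_notifier_unregister(); consumers use these to react to rate changes on a specific struct clk. A minimal sketch of a PRE/POST_RATE_CHANGE handler, with a hypothetical driver context:

#include <linux/clk.h>
#include <linux/notifier.h>
#include <linux/printk.h>

struct my_ctx {
	struct notifier_block nb;
	struct clk *clk;
};

static int my_clk_notify(struct notifier_block *nb, unsigned long event,
			 void *data)
{
	struct clk_notifier_data *ndata = data;

	switch (event) {
	case PRE_RATE_CHANGE:
		/* Returning notifier_from_errno(-EBUSY) here would veto the change. */
		pr_debug("rate changing: %lu -> %lu\n",
			 ndata->old_rate, ndata->new_rate);
		break;
	case POST_RATE_CHANGE:
		/* Reprogram dividers/baud rates that depend on the new rate. */
		break;
	case ABORT_RATE_CHANGE:
		break;
	}

	return NOTIFY_OK;
}

static int my_watch_clk(struct my_ctx *ctx)
{
	ctx->nb.notifier_call = my_clk_notify;
	/* Bumps clk->core->notifier_count, as seen in the listing above. */
	return clk_notifier_register(ctx->clk, &ctx->nb);
}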