clk-composite.c 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492
  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (c) 2013 NVIDIA CORPORATION. All rights reserved.
  4. */
  5. #include <linux/clk-provider.h>
  6. #include <linux/device.h>
  7. #include <linux/err.h>
  8. #include <linux/slab.h>
  9. static u8 clk_composite_get_parent(struct clk_hw *hw)
  10. {
  11. struct clk_composite *composite = to_clk_composite(hw);
  12. const struct clk_ops *mux_ops = composite->mux_ops;
  13. struct clk_hw *mux_hw = composite->mux_hw;
  14. __clk_hw_set_clk(mux_hw, hw);
  15. return mux_ops->get_parent(mux_hw);
  16. }
  17. static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
  18. {
  19. struct clk_composite *composite = to_clk_composite(hw);
  20. const struct clk_ops *mux_ops = composite->mux_ops;
  21. struct clk_hw *mux_hw = composite->mux_hw;
  22. __clk_hw_set_clk(mux_hw, hw);
  23. return mux_ops->set_parent(mux_hw, index);
  24. }
  25. static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
  26. unsigned long parent_rate)
  27. {
  28. struct clk_composite *composite = to_clk_composite(hw);
  29. const struct clk_ops *rate_ops = composite->rate_ops;
  30. struct clk_hw *rate_hw = composite->rate_hw;
  31. __clk_hw_set_clk(rate_hw, hw);
  32. return rate_ops->recalc_rate(rate_hw, parent_rate);
  33. }
  34. static int clk_composite_determine_rate_for_parent(struct clk_hw *rate_hw,
  35. struct clk_rate_request *req,
  36. struct clk_hw *parent_hw,
  37. const struct clk_ops *rate_ops)
  38. {
  39. long rate;
  40. req->best_parent_hw = parent_hw;
  41. req->best_parent_rate = clk_hw_get_rate(parent_hw);
  42. if (rate_ops->determine_rate)
  43. return rate_ops->determine_rate(rate_hw, req);
  44. rate = rate_ops->round_rate(rate_hw, req->rate,
  45. &req->best_parent_rate);
  46. if (rate < 0)
  47. return rate;
  48. req->rate = rate;
  49. return 0;
  50. }
  51. static int clk_composite_determine_rate(struct clk_hw *hw,
  52. struct clk_rate_request *req)
  53. {
  54. struct clk_composite *composite = to_clk_composite(hw);
  55. const struct clk_ops *rate_ops = composite->rate_ops;
  56. const struct clk_ops *mux_ops = composite->mux_ops;
  57. struct clk_hw *rate_hw = composite->rate_hw;
  58. struct clk_hw *mux_hw = composite->mux_hw;
  59. struct clk_hw *parent;
  60. unsigned long rate_diff;
  61. unsigned long best_rate_diff = ULONG_MAX;
  62. unsigned long best_rate = 0;
  63. int i, ret;
  64. if (rate_hw && rate_ops &&
  65. (rate_ops->determine_rate || rate_ops->round_rate) &&
  66. mux_hw && mux_ops && mux_ops->set_parent) {
  67. req->best_parent_hw = NULL;
  68. if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
  69. struct clk_rate_request tmp_req;
  70. parent = clk_hw_get_parent(mux_hw);
  71. clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
  72. ret = clk_composite_determine_rate_for_parent(rate_hw,
  73. &tmp_req,
  74. parent,
  75. rate_ops);
  76. if (ret)
  77. return ret;
  78. req->rate = tmp_req.rate;
  79. req->best_parent_hw = tmp_req.best_parent_hw;
  80. req->best_parent_rate = tmp_req.best_parent_rate;
  81. return 0;
  82. }
  83. for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
  84. struct clk_rate_request tmp_req;
  85. parent = clk_hw_get_parent_by_index(mux_hw, i);
  86. if (!parent)
  87. continue;
  88. clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
  89. ret = clk_composite_determine_rate_for_parent(rate_hw,
  90. &tmp_req,
  91. parent,
  92. rate_ops);
  93. if (ret)
  94. continue;
  95. if (req->rate >= tmp_req.rate)
  96. rate_diff = req->rate - tmp_req.rate;
  97. else
  98. rate_diff = tmp_req.rate - req->rate;
  99. if (!rate_diff || !req->best_parent_hw
  100. || best_rate_diff > rate_diff) {
  101. req->best_parent_hw = parent;
  102. req->best_parent_rate = tmp_req.best_parent_rate;
  103. best_rate_diff = rate_diff;
  104. best_rate = tmp_req.rate;
  105. }
  106. if (!rate_diff)
  107. return 0;
  108. }
  109. req->rate = best_rate;
  110. return 0;
  111. } else if (rate_hw && rate_ops && rate_ops->determine_rate) {
  112. __clk_hw_set_clk(rate_hw, hw);
  113. return rate_ops->determine_rate(rate_hw, req);
  114. } else if (mux_hw && mux_ops && mux_ops->determine_rate) {
  115. __clk_hw_set_clk(mux_hw, hw);
  116. return mux_ops->determine_rate(mux_hw, req);
  117. } else {
  118. pr_err("clk: clk_composite_determine_rate function called, but no mux or rate callback set!\n");
  119. return -EINVAL;
  120. }
  121. }
  122. static long clk_composite_round_rate(struct clk_hw *hw, unsigned long rate,
  123. unsigned long *prate)
  124. {
  125. struct clk_composite *composite = to_clk_composite(hw);
  126. const struct clk_ops *rate_ops = composite->rate_ops;
  127. struct clk_hw *rate_hw = composite->rate_hw;
  128. __clk_hw_set_clk(rate_hw, hw);
  129. return rate_ops->round_rate(rate_hw, rate, prate);
  130. }
  131. static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
  132. unsigned long parent_rate)
  133. {
  134. struct clk_composite *composite = to_clk_composite(hw);
  135. const struct clk_ops *rate_ops = composite->rate_ops;
  136. struct clk_hw *rate_hw = composite->rate_hw;
  137. __clk_hw_set_clk(rate_hw, hw);
  138. return rate_ops->set_rate(rate_hw, rate, parent_rate);
  139. }
/*
 * .set_rate_and_parent: program both the rate component and the mux.
 *
 * The ordering of the two operations depends on what rate the rate
 * component would currently produce from the new parent's rate
 * (temp_rate): if that would exceed the requested rate, the divider is
 * programmed down before the parent switch; otherwise the parent is
 * switched first — presumably so the output never transiently runs
 * faster than the requested rate during the transition (TODO confirm).
 *
 * NOTE(review): return values of set_rate/set_parent are ignored; this
 * function always reports success.
 */
static int clk_composite_set_rate_and_parent(struct clk_hw *hw,
					     unsigned long rate,
					     unsigned long parent_rate,
					     u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	unsigned long temp_rate;

	__clk_hw_set_clk(rate_hw, hw);
	__clk_hw_set_clk(mux_hw, hw);

	/* Rate the rate component would yield at the new parent's rate. */
	temp_rate = rate_ops->recalc_rate(rate_hw, parent_rate);
	if (temp_rate > rate) {
		/* Would overshoot: lower the rate before reparenting. */
		rate_ops->set_rate(rate_hw, rate, parent_rate);
		mux_ops->set_parent(mux_hw, index);
	} else {
		/* Safe to reparent first, then raise the rate. */
		mux_ops->set_parent(mux_hw, index);
		rate_ops->set_rate(rate_hw, rate, parent_rate);
	}

	return 0;
}
  163. static int clk_composite_is_enabled(struct clk_hw *hw)
  164. {
  165. struct clk_composite *composite = to_clk_composite(hw);
  166. const struct clk_ops *gate_ops = composite->gate_ops;
  167. struct clk_hw *gate_hw = composite->gate_hw;
  168. __clk_hw_set_clk(gate_hw, hw);
  169. return gate_ops->is_enabled(gate_hw);
  170. }
  171. static int clk_composite_enable(struct clk_hw *hw)
  172. {
  173. struct clk_composite *composite = to_clk_composite(hw);
  174. const struct clk_ops *gate_ops = composite->gate_ops;
  175. struct clk_hw *gate_hw = composite->gate_hw;
  176. __clk_hw_set_clk(gate_hw, hw);
  177. return gate_ops->enable(gate_hw);
  178. }
  179. static void clk_composite_disable(struct clk_hw *hw)
  180. {
  181. struct clk_composite *composite = to_clk_composite(hw);
  182. const struct clk_ops *gate_ops = composite->gate_ops;
  183. struct clk_hw *gate_hw = composite->gate_hw;
  184. __clk_hw_set_clk(gate_hw, hw);
  185. gate_ops->disable(gate_hw);
  186. }
/*
 * Common worker for registering a composite clock built from optional
 * mux, rate and gate components.
 *
 * Exactly one of @parent_names / @pdata describes the parents; the
 * caller passes NULL for the unused one.  For each component pair
 * (hw + ops) that is supplied, the corresponding clk_composite_*
 * trampolines are installed into composite->ops.  Mandatory ops per
 * component are validated up front (mux: get_parent; rate: recalc_rate;
 * gate: is_enabled/enable/disable) and -EINVAL is returned if missing.
 *
 * Returns the registered clk_hw on success or an ERR_PTR; the
 * composite allocation is freed on every error path.
 */
static struct clk_hw *__clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;
	/* On-stack init data; clk_hw_register() consumes it during the call. */
	struct clk_init_data init = {};
	struct clk_composite *composite;
	struct clk_ops *clk_composite_ops;
	int ret;

	composite = kzalloc(sizeof(*composite), GFP_KERNEL);
	if (!composite)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = flags;
	/* Only one parent description form is used; the other stays NULL. */
	if (parent_names)
		init.parent_names = parent_names;
	else
		init.parent_data = pdata;
	init.num_parents = num_parents;
	hw = &composite->hw;

	/* Op table assembled in place inside the composite itself. */
	clk_composite_ops = &composite->ops;

	if (mux_hw && mux_ops) {
		/* A mux without get_parent is unusable. */
		if (!mux_ops->get_parent) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->mux_hw = mux_hw;
		composite->mux_ops = mux_ops;
		clk_composite_ops->get_parent = clk_composite_get_parent;
		if (mux_ops->set_parent)
			clk_composite_ops->set_parent = clk_composite_set_parent;
		if (mux_ops->determine_rate)
			clk_composite_ops->determine_rate = clk_composite_determine_rate;
	}

	if (rate_hw && rate_ops) {
		/* recalc_rate is mandatory for a rate component. */
		if (!rate_ops->recalc_rate) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}
		clk_composite_ops->recalc_rate = clk_composite_recalc_rate;

		/*
		 * determine_rate wins over round_rate; either way
		 * clk_composite_determine_rate handles the mux-assigned
		 * case above as well (same trampoline).
		 */
		if (rate_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_composite_determine_rate;
		else if (rate_ops->round_rate)
			clk_composite_ops->round_rate =
				clk_composite_round_rate;

		/* .set_rate requires either .round_rate or .determine_rate */
		if (rate_ops->set_rate) {
			if (rate_ops->determine_rate || rate_ops->round_rate)
				clk_composite_ops->set_rate =
					clk_composite_set_rate;
			else
				WARN(1, "%s: missing round_rate op is required\n",
				     __func__);
		}

		composite->rate_hw = rate_hw;
		composite->rate_ops = rate_ops;
	}

	/* Coordinated rate+parent changes need both component ops. */
	if (mux_hw && mux_ops && rate_hw && rate_ops) {
		if (mux_ops->set_parent && rate_ops->set_rate)
			clk_composite_ops->set_rate_and_parent =
				clk_composite_set_rate_and_parent;
	}

	if (gate_hw && gate_ops) {
		/* All three gate ops are mandatory. */
		if (!gate_ops->is_enabled || !gate_ops->enable ||
		    !gate_ops->disable) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->gate_hw = gate_hw;
		composite->gate_ops = gate_ops;
		clk_composite_ops->is_enabled = clk_composite_is_enabled;
		clk_composite_ops->enable = clk_composite_enable;
		clk_composite_ops->disable = clk_composite_disable;
	}

	init.ops = clk_composite_ops;
	composite->hw.init = &init;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		hw = ERR_PTR(ret);
		goto err;
	}

	/* Let each component see the composite's struct clk. */
	if (composite->mux_hw)
		composite->mux_hw->clk = hw->clk;

	if (composite->rate_hw)
		composite->rate_hw->clk = hw->clk;

	if (composite->gate_hw)
		composite->gate_hw->clk = hw->clk;

	return hw;

err:
	kfree(composite);
	return hw;
}
  284. struct clk_hw *clk_hw_register_composite(struct device *dev, const char *name,
  285. const char * const *parent_names, int num_parents,
  286. struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
  287. struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
  288. struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
  289. unsigned long flags)
  290. {
  291. return __clk_hw_register_composite(dev, name, parent_names, NULL,
  292. num_parents, mux_hw, mux_ops,
  293. rate_hw, rate_ops, gate_hw,
  294. gate_ops, flags);
  295. }
  296. EXPORT_SYMBOL_GPL(clk_hw_register_composite);
  297. struct clk_hw *clk_hw_register_composite_pdata(struct device *dev,
  298. const char *name,
  299. const struct clk_parent_data *parent_data,
  300. int num_parents,
  301. struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
  302. struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
  303. struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
  304. unsigned long flags)
  305. {
  306. return __clk_hw_register_composite(dev, name, NULL, parent_data,
  307. num_parents, mux_hw, mux_ops,
  308. rate_hw, rate_ops, gate_hw,
  309. gate_ops, flags);
  310. }
  311. struct clk *clk_register_composite(struct device *dev, const char *name,
  312. const char * const *parent_names, int num_parents,
  313. struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
  314. struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
  315. struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
  316. unsigned long flags)
  317. {
  318. struct clk_hw *hw;
  319. hw = clk_hw_register_composite(dev, name, parent_names, num_parents,
  320. mux_hw, mux_ops, rate_hw, rate_ops, gate_hw, gate_ops,
  321. flags);
  322. if (IS_ERR(hw))
  323. return ERR_CAST(hw);
  324. return hw->clk;
  325. }
  326. EXPORT_SYMBOL_GPL(clk_register_composite);
  327. struct clk *clk_register_composite_pdata(struct device *dev, const char *name,
  328. const struct clk_parent_data *parent_data,
  329. int num_parents,
  330. struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
  331. struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
  332. struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
  333. unsigned long flags)
  334. {
  335. struct clk_hw *hw;
  336. hw = clk_hw_register_composite_pdata(dev, name, parent_data,
  337. num_parents, mux_hw, mux_ops, rate_hw, rate_ops,
  338. gate_hw, gate_ops, flags);
  339. if (IS_ERR(hw))
  340. return ERR_CAST(hw);
  341. return hw->clk;
  342. }
  343. void clk_unregister_composite(struct clk *clk)
  344. {
  345. struct clk_composite *composite;
  346. struct clk_hw *hw;
  347. hw = __clk_get_hw(clk);
  348. if (!hw)
  349. return;
  350. composite = to_clk_composite(hw);
  351. clk_unregister(clk);
  352. kfree(composite);
  353. }
  354. void clk_hw_unregister_composite(struct clk_hw *hw)
  355. {
  356. struct clk_composite *composite;
  357. composite = to_clk_composite(hw);
  358. clk_hw_unregister(hw);
  359. kfree(composite);
  360. }
  361. EXPORT_SYMBOL_GPL(clk_hw_unregister_composite);
  362. static void devm_clk_hw_release_composite(struct device *dev, void *res)
  363. {
  364. clk_hw_unregister_composite(*(struct clk_hw **)res);
  365. }
  366. static struct clk_hw *__devm_clk_hw_register_composite(struct device *dev,
  367. const char *name, const char * const *parent_names,
  368. const struct clk_parent_data *pdata, int num_parents,
  369. struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
  370. struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
  371. struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
  372. unsigned long flags)
  373. {
  374. struct clk_hw **ptr, *hw;
  375. ptr = devres_alloc(devm_clk_hw_release_composite, sizeof(*ptr),
  376. GFP_KERNEL);
  377. if (!ptr)
  378. return ERR_PTR(-ENOMEM);
  379. hw = __clk_hw_register_composite(dev, name, parent_names, pdata,
  380. num_parents, mux_hw, mux_ops, rate_hw,
  381. rate_ops, gate_hw, gate_ops, flags);
  382. if (!IS_ERR(hw)) {
  383. *ptr = hw;
  384. devres_add(dev, ptr);
  385. } else {
  386. devres_free(ptr);
  387. }
  388. return hw;
  389. }
  390. struct clk_hw *devm_clk_hw_register_composite_pdata(struct device *dev,
  391. const char *name,
  392. const struct clk_parent_data *parent_data,
  393. int num_parents,
  394. struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
  395. struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
  396. struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
  397. unsigned long flags)
  398. {
  399. return __devm_clk_hw_register_composite(dev, name, NULL, parent_data,
  400. num_parents, mux_hw, mux_ops,
  401. rate_hw, rate_ops, gate_hw,
  402. gate_ops, flags);
  403. }