clk-rcg2.c 22 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936
  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (c) 2013, 2018, The Linux Foundation. All rights reserved.
  4. */
  5. #include <linux/kernel.h>
  6. #include <linux/bitops.h>
  7. #include <linux/err.h>
  8. #include <linux/bug.h>
  9. #include <linux/export.h>
  10. #include <linux/clk-provider.h>
  11. #include <linux/delay.h>
  12. #include <linux/regmap.h>
  13. #include <linux/math64.h>
  14. #include <asm/div64.h>
  15. #include "clk-rcg.h"
  16. #include "common.h"
/* CMD register (offset relative to rcg->cmd_rcgr) and its bit fields */
#define CMD_REG 0x0
#define CMD_UPDATE BIT(0)	/* write 1 to latch new CFG/M/N/D values */
#define CMD_ROOT_EN BIT(1)	/* force-enable the root clock */
#define CMD_DIRTY_CFG BIT(4)	/* pending (unlatched) CFG change */
#define CMD_DIRTY_N BIT(5)	/* pending N change */
#define CMD_DIRTY_M BIT(6)	/* pending M change */
#define CMD_DIRTY_D BIT(7)	/* pending D change */
#define CMD_ROOT_OFF BIT(31)	/* status: root clock is off */

/* CFG register: source select, pre-divider and M/N counter mode */
#define CFG_REG 0x4
#define CFG_SRC_DIV_SHIFT 0
#define CFG_SRC_SEL_SHIFT 8
#define CFG_SRC_SEL_MASK (0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT 12
#define CFG_MODE_MASK (0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE (0x2 << CFG_MODE_SHIFT)	/* M/N fractional mode */
#define CFG_HW_CLK_CTRL_MASK BIT(20)

/* M/N/D counter registers (N and D are written in one's-complement form) */
#define M_REG 0x8
#define N_REG 0xc
#define D_REG 0x10

/* How a requested rate is matched against the frequency table */
enum freq_policy {
	FLOOR,	/* highest table entry <= requested rate */
	CEIL,	/* lowest table entry >= requested rate */
};
  40. static int clk_rcg2_is_enabled(struct clk_hw *hw)
  41. {
  42. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  43. u32 cmd;
  44. int ret;
  45. ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
  46. if (ret)
  47. return ret;
  48. return (cmd & CMD_ROOT_OFF) == 0;
  49. }
  50. static u8 clk_rcg2_get_parent(struct clk_hw *hw)
  51. {
  52. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  53. int num_parents = clk_hw_get_num_parents(hw);
  54. u32 cfg;
  55. int i, ret;
  56. ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
  57. if (ret)
  58. goto err;
  59. cfg &= CFG_SRC_SEL_MASK;
  60. cfg >>= CFG_SRC_SEL_SHIFT;
  61. for (i = 0; i < num_parents; i++)
  62. if (cfg == rcg->parent_map[i].cfg)
  63. return i;
  64. err:
  65. pr_debug("%s: Clock %s has invalid parent, using default.\n",
  66. __func__, clk_hw_get_name(hw));
  67. return 0;
  68. }
  69. static int update_config(struct clk_rcg2 *rcg)
  70. {
  71. int count, ret;
  72. u32 cmd;
  73. struct clk_hw *hw = &rcg->clkr.hw;
  74. const char *name = clk_hw_get_name(hw);
  75. ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
  76. CMD_UPDATE, CMD_UPDATE);
  77. if (ret)
  78. return ret;
  79. /* Wait for update to take effect */
  80. for (count = 500; count > 0; count--) {
  81. ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
  82. if (ret)
  83. return ret;
  84. if (!(cmd & CMD_UPDATE))
  85. return 0;
  86. udelay(1);
  87. }
  88. WARN(1, "%s: rcg didn't update its configuration.", name);
  89. return -EBUSY;
  90. }
  91. static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
  92. {
  93. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  94. int ret;
  95. u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
  96. ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
  97. CFG_SRC_SEL_MASK, cfg);
  98. if (ret)
  99. return ret;
  100. return update_config(rcg);
  101. }
  102. /*
  103. * Calculate m/n:d rate
  104. *
  105. * parent_rate m
  106. * rate = ----------- x ---
  107. * hid_div n
  108. */
  109. static unsigned long
  110. calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
  111. {
  112. if (hid_div) {
  113. rate *= 2;
  114. rate /= hid_div + 1;
  115. }
  116. if (mode) {
  117. u64 tmp = rate;
  118. tmp *= m;
  119. do_div(tmp, n);
  120. rate = tmp;
  121. }
  122. return rate;
  123. }
  124. static unsigned long
  125. clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
  126. {
  127. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  128. u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;
  129. regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
  130. if (rcg->mnd_width) {
  131. mask = BIT(rcg->mnd_width) - 1;
  132. regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
  133. m &= mask;
  134. regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
  135. n = ~n;
  136. n &= mask;
  137. n += m;
  138. mode = cfg & CFG_MODE_MASK;
  139. mode >>= CFG_MODE_SHIFT;
  140. }
  141. mask = BIT(rcg->hid_width) - 1;
  142. hid_div = cfg >> CFG_SRC_DIV_SHIFT;
  143. hid_div &= mask;
  144. return calc_rate(parent_rate, m, n, mode, hid_div);
  145. }
  146. static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
  147. struct clk_rate_request *req,
  148. enum freq_policy policy)
  149. {
  150. unsigned long clk_flags, rate = req->rate;
  151. struct clk_hw *p;
  152. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  153. int index;
  154. switch (policy) {
  155. case FLOOR:
  156. f = qcom_find_freq_floor(f, rate);
  157. break;
  158. case CEIL:
  159. f = qcom_find_freq(f, rate);
  160. break;
  161. default:
  162. return -EINVAL;
  163. };
  164. if (!f)
  165. return -EINVAL;
  166. index = qcom_find_src_index(hw, rcg->parent_map, f->src);
  167. if (index < 0)
  168. return index;
  169. clk_flags = clk_hw_get_flags(hw);
  170. p = clk_hw_get_parent_by_index(hw, index);
  171. if (!p)
  172. return -EINVAL;
  173. if (clk_flags & CLK_SET_RATE_PARENT) {
  174. rate = f->freq;
  175. if (f->pre_div) {
  176. if (!rate)
  177. rate = req->rate;
  178. rate /= 2;
  179. rate *= f->pre_div + 1;
  180. }
  181. if (f->n) {
  182. u64 tmp = rate;
  183. tmp = tmp * f->n;
  184. do_div(tmp, f->m);
  185. rate = tmp;
  186. }
  187. } else {
  188. rate = clk_hw_get_rate(p);
  189. }
  190. req->best_parent_hw = p;
  191. req->best_parent_rate = rate;
  192. req->rate = f->freq;
  193. return 0;
  194. }
/* determine_rate callback: round the request up via the freq table (CEIL). */
static int clk_rcg2_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL);
}
/* determine_rate callback: round the request down via the freq table (FLOOR). */
static int clk_rcg2_determine_floor_rate(struct clk_hw *hw,
					 struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR);
}
/*
 * Program CFG, M, N and D for the frequency-table entry @f WITHOUT
 * touching CMD_UPDATE — nothing takes effect until update_config() is
 * called. Returns 0 or a negative errno.
 */
static int __clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	/* Program the M/N/D counters only when the RCG has them and @f uses them */
	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		/* M is programmed as-is ... */
		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + M_REG, mask, f->m);
		if (ret)
			return ret;

		/* ... while N is stored as ~(n - m) ... */
		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + N_REG, mask,
					 ~(f->n - f->m));
		if (ret)
			return ret;

		/* ... and D as ~n (one's-complement hardware encoding) */
		ret = regmap_update_bits(rcg->clkr.regmap,
					 rcg->cmd_rcgr + D_REG, mask, ~f->n);
		if (ret)
			return ret;
	}

	/* Rewrite divider, source select, mode and HW clock control together */
	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK | CFG_HW_CLK_CTRL_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	/* Dual-edge mode is only needed for a true fraction (m != n) */
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;

	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				  mask, cfg);
}
  238. static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
  239. {
  240. int ret;
  241. ret = __clk_rcg2_configure(rcg, f);
  242. if (ret)
  243. return ret;
  244. return update_config(rcg);
  245. }
  246. static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
  247. enum freq_policy policy)
  248. {
  249. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  250. const struct freq_tbl *f;
  251. switch (policy) {
  252. case FLOOR:
  253. f = qcom_find_freq_floor(rcg->freq_tbl, rate);
  254. break;
  255. case CEIL:
  256. f = qcom_find_freq(rcg->freq_tbl, rate);
  257. break;
  258. default:
  259. return -EINVAL;
  260. };
  261. if (!f)
  262. return -EINVAL;
  263. return clk_rcg2_configure(rcg, f);
  264. }
/* set_rate callback: program the table entry at or above @rate. */
static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}
/* set_rate callback: program the table entry at or below @rate. */
static int clk_rcg2_set_floor_rate(struct clk_hw *hw, unsigned long rate,
				   unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}
/*
 * set_rate_and_parent callback (CEIL): the parent is implied by the
 * chosen frequency-table entry, so @index is unused here.
 */
static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, CEIL);
}
/*
 * set_rate_and_parent callback (FLOOR): the parent is implied by the
 * chosen frequency-table entry, so @index is unused here.
 */
static int clk_rcg2_set_floor_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate, FLOOR);
}
/* Standard frequency-table-driven RCG2 ops, rounding rates up (CEIL). */
const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);
/* Same as clk_rcg2_ops but rounding rates down (FLOOR policy). */
const struct clk_ops clk_rcg2_floor_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_floor_rate,
	.set_rate = clk_rcg2_set_floor_rate,
	.set_rate_and_parent = clk_rcg2_set_floor_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_floor_ops);
/* An m/n fraction entry for pixel/eDP clock fraction tables. */
struct frac_entry {
	int num;	/* numerator (programmed as M) */
	int den;	/* denominator (programmed as N) */
};
/* Fractions of a 675 MHz source (link rate of 270M); terminated by {0,0}. */
static const struct frac_entry frac_table_675m[] = { /* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },
};
  319. static struct frac_entry frac_table_810m[] = { /* Link rate of 162M */
  320. { 31, 211 }, /* 119 M */
  321. { 32, 199 }, /* 130.25 M */
  322. { 63, 307 }, /* 138.50 M */
  323. { 11, 60 }, /* 148.50 M */
  324. { 50, 263 }, /* 154 M */
  325. { 31, 120 }, /* 205.25 M */
  326. { 119, 359 }, /* 268.50 M */
  327. { },
  328. };
/*
 * eDP pixel clock set_rate: pick an m/n fraction (table chosen by the
 * fixed link-rate parent) whose required source rate is within +/-delta
 * of the actual parent rate, keeping the currently programmed pre-divider.
 */
static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;	/* allowed mismatch, in Hz */
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	/* The fraction table depends on the PLL's link rate */
	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		/* Source rate this fraction would need to produce @rate */
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		/* Reuse the pre-divider currently programmed in CFG */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;	/* no fraction fits within delta */
}
/* set_rate_and_parent: @index is ignored, see comment below. */
static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}
/*
 * eDP pixel clock determine_rate: force the parent dictated by the
 * frequency table, then pick the first fraction whose required source
 * rate is within +/-delta of the parent's current rate and report the
 * exact output that fraction (plus the programmed pre-divider) yields.
 */
static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;	/* allowed mismatch, in Hz */
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	/* NOTE(review): index is not checked for < 0 before use below —
	 * assumes f->src always maps to a valid parent; confirm. */
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	/* Force the correct parent */
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	/* The fraction table depends on the PLL's link rate */
	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		/* Source rate this fraction would need for req->rate */
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		/* Use the pre-divider currently programmed in CFG */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);

		return 0;
	}

	return -EINVAL;	/* no fraction fits within delta */
}
/* Ops for the eDP pixel clock: fraction-table based, fixed parent. */
const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);
  414. static int clk_byte_determine_rate(struct clk_hw *hw,
  415. struct clk_rate_request *req)
  416. {
  417. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  418. const struct freq_tbl *f = rcg->freq_tbl;
  419. int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
  420. unsigned long parent_rate, div;
  421. u32 mask = BIT(rcg->hid_width) - 1;
  422. struct clk_hw *p;
  423. if (req->rate == 0)
  424. return -EINVAL;
  425. req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
  426. req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);
  427. div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
  428. div = min_t(u32, div, mask);
  429. req->rate = calc_rate(parent_rate, 0, 0, 0, div);
  430. return 0;
  431. }
  432. static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
  433. unsigned long parent_rate)
  434. {
  435. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  436. struct freq_tbl f = *rcg->freq_tbl;
  437. unsigned long div;
  438. u32 mask = BIT(rcg->hid_width) - 1;
  439. div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
  440. div = min_t(u32, div, mask);
  441. f.pre_div = div;
  442. return clk_rcg2_configure(rcg, &f);
  443. }
/* set_rate_and_parent: @index is ignored, see comment below. */
static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_byte_set_rate(hw, rate, parent_rate);
}
/* Ops for the DSI byte clock: divider-only, parent fixed by the table. */
const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);
  460. static int clk_byte2_determine_rate(struct clk_hw *hw,
  461. struct clk_rate_request *req)
  462. {
  463. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  464. unsigned long parent_rate, div;
  465. u32 mask = BIT(rcg->hid_width) - 1;
  466. struct clk_hw *p;
  467. unsigned long rate = req->rate;
  468. if (rate == 0)
  469. return -EINVAL;
  470. p = req->best_parent_hw;
  471. req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);
  472. div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
  473. div = min_t(u32, div, mask);
  474. req->rate = calc_rate(parent_rate, 0, 0, 0, div);
  475. return 0;
  476. }
/*
 * byte2 set_rate: compute the pre-divider for the given parent rate,
 * then read SRC_SEL back from hardware to discover which parent is in
 * use (the parent may have been switched just before this call) and
 * program the RCG with the matching freq_tbl src.
 */
static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	unsigned long div;
	int i, num_parents = clk_hw_get_num_parents(hw);
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 cfg;

	/* hid_div encodes 2*div - 1, capped at the field width */
	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	/* Recover the current source from the hardware SRC_SEL field */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++) {
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			return clk_rcg2_configure(rcg, &f);
		}
	}

	return -EINVAL;	/* hardware reports an unknown source */
}
/* set_rate_and_parent: @index is ignored, see comment below. */
static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Read the hardware to determine parent during set_rate */
	return clk_byte2_set_rate(hw, rate, parent_rate);
}
/* Ops for the byte2 clock: divider-only, parent read back from hardware. */
const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);
/* m/n fractions tried in order for the pixel clock; terminated by {0,0}. */
static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};
  523. static int clk_pixel_determine_rate(struct clk_hw *hw,
  524. struct clk_rate_request *req)
  525. {
  526. unsigned long request, src_rate;
  527. int delta = 100000;
  528. const struct frac_entry *frac = frac_table_pixel;
  529. for (; frac->num; frac++) {
  530. request = (req->rate * frac->den) / frac->num;
  531. src_rate = clk_hw_round_rate(req->best_parent_hw, request);
  532. if ((src_rate < (request - delta)) ||
  533. (src_rate > (request + delta)))
  534. continue;
  535. req->best_parent_rate = src_rate;
  536. req->rate = (src_rate * frac->num) / frac->den;
  537. return 0;
  538. }
  539. return -EINVAL;
  540. }
/*
 * Pixel clock set_rate: discover the current source from hardware, then
 * pick the first fraction whose required parent rate is within +/-delta
 * of the actual parent rate, keeping the programmed pre-divider.
 */
static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;	/* allowed mismatch, in Hz */
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	/* Recover the current source from the hardware SRC_SEL field */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		/* Parent rate this fraction would need to produce @rate */
		request = (rate * frac->den) / frac->num;
		if ((parent_rate < (request - delta)) ||
		    (parent_rate > (request + delta)))
			continue;

		/* Reuse the pre-divider currently programmed in CFG */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
			    &hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;	/* no fraction fits within delta */
}
/* set_rate_and_parent: @index unused — the source is read from hardware. */
static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	return clk_pixel_set_rate(hw, rate, parent_rate);
}
/* Ops for the pixel clock: fraction-table based. */
const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);
/*
 * GFX3D determine_rate: ping-pong between two programmable PLLs (p2/p8)
 * so the GPU clock can glitchlessly change rate — the new rate is set on
 * the idle PLL and the mux is switched to it. Parent indices 0/2/3/4 are
 * hard-coded to XO / PLL9 / p2 / p8 for this clock's parent list.
 */
static int clk_gfx3d_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rate_request parent_req = { };
	struct clk_hw *p2, *p8, *p9, *xo;
	unsigned long p9_rate;
	int ret;

	/* XO rate requested: just mux to XO */
	xo = clk_hw_get_parent_by_index(hw, 0);
	if (req->rate == clk_hw_get_rate(xo)) {
		req->best_parent_hw = xo;
		return 0;
	}

	p9 = clk_hw_get_parent_by_index(hw, 2);
	p2 = clk_hw_get_parent_by_index(hw, 3);
	p8 = clk_hw_get_parent_by_index(hw, 4);

	/* PLL9 is a fixed rate PLL */
	p9_rate = clk_hw_get_rate(p9);

	/* Cap the request at PLL9's rate; exact match uses PLL9 directly */
	parent_req.rate = req->rate = min(req->rate, p9_rate);
	if (req->rate == p9_rate) {
		req->rate = req->best_parent_rate = p9_rate;
		req->best_parent_hw = p9;
		return 0;
	}

	if (req->best_parent_hw == p9) {
		/* Are we going back to a previously used rate? */
		if (clk_hw_get_rate(p8) == req->rate)
			req->best_parent_hw = p8;
		else
			req->best_parent_hw = p2;
	} else if (req->best_parent_hw == p8) {
		/* Ping-pong: currently on p8, move to p2 */
		req->best_parent_hw = p2;
	} else {
		/* Ping-pong: currently on p2 (or other), move to p8 */
		req->best_parent_hw = p8;
	}

	/* Ask the chosen PLL what rate it can actually deliver */
	ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
	if (ret)
		return ret;

	req->rate = req->best_parent_rate = parent_req.rate;

	return 0;
}
/*
 * GFX3D set_rate_and_parent: pure mux switch. The whole CFG register is
 * overwritten (regmap_write, not update_bits) because the divider and
 * M/N hardware are intentionally unused for this clock.
 */
static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;
	int ret;

	/* Just mux it, we don't use the division or m/n hardware */
	cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}
/* GFX3D set_rate: intentionally a no-op, see comment below. */
static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}
/* Ops for the GFX3D (GPU) clock: PLL ping-pong mux, no divider/MND use. */
const struct clk_ops clk_gfx3d_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_gfx3d_set_rate,
	.set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
	.determine_rate = clk_gfx3d_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_gfx3d_ops);
  664. static int clk_rcg2_set_force_enable(struct clk_hw *hw)
  665. {
  666. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  667. const char *name = clk_hw_get_name(hw);
  668. int ret, count;
  669. ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
  670. CMD_ROOT_EN, CMD_ROOT_EN);
  671. if (ret)
  672. return ret;
  673. /* wait for RCG to turn ON */
  674. for (count = 500; count > 0; count--) {
  675. if (clk_rcg2_is_enabled(hw))
  676. return 0;
  677. udelay(1);
  678. }
  679. pr_err("%s: RCG did not turn on\n", name);
  680. return -ETIMEDOUT;
  681. }
/* Drop the CMD_ROOT_EN force-enable set by clk_rcg2_set_force_enable(). */
static int clk_rcg2_clear_force_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				  CMD_ROOT_EN, 0);
}
  688. static int
  689. clk_rcg2_shared_force_enable_clear(struct clk_hw *hw, const struct freq_tbl *f)
  690. {
  691. struct clk_rcg2 *rcg = to_clk_rcg2(hw);
  692. int ret;
  693. ret = clk_rcg2_set_force_enable(hw);
  694. if (ret)
  695. return ret;
  696. ret = clk_rcg2_configure(rcg, f);
  697. if (ret)
  698. return ret;
  699. return clk_rcg2_clear_force_enable(hw);
  700. }
/*
 * Shared-RCG set_rate: if the clock is disabled, only stage the new
 * configuration (it is latched later in clk_rcg2_shared_enable());
 * otherwise apply it immediately under force-enable.
 */
static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	/*
	 * In case clock is disabled, update the CFG, M, N and D registers
	 * and don't hit the update bit of CMD register.
	 */
	if (!__clk_is_enabled(hw->clk))
		return __clk_rcg2_configure(rcg, f);

	return clk_rcg2_shared_force_enable_clear(hw, f);
}
/* set_rate_and_parent: parent comes from the freq table; @index unused. */
static int clk_rcg2_shared_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_rcg2_shared_set_rate(hw, rate, parent_rate);
}
/*
 * Shared-RCG enable: latch the configuration that was staged while the
 * clock was off (see clk_rcg2_shared_set_rate()), under force-enable.
 */
static int clk_rcg2_shared_enable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;

	/*
	 * Set the update bit because required configuration has already
	 * been written in clk_rcg2_shared_set_rate()
	 */
	ret = clk_rcg2_set_force_enable(hw);
	if (ret)
		return ret;

	ret = update_config(rcg);
	if (ret)
		return ret;

	return clk_rcg2_clear_force_enable(hw);
}
/*
 * Shared-RCG disable: park the RCG on the always-on safe source, then
 * restage (but do not latch) the previous configuration so a later
 * enable resumes at the old rate. Ordering here is critical — do not
 * reorder the register accesses.
 */
static void clk_rcg2_shared_disable(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg;

	/*
	 * Store current configuration as switching to safe source would clear
	 * the SRC and DIV of CFG register
	 */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	/*
	 * Park the RCG at a safe configuration - sourced off of safe source.
	 * Force enable and disable the RCG while configuring it to safeguard
	 * against any update signal coming from the downstream clock.
	 * The current parent is still prepared and enabled at this point, and
	 * the safe source is always on while application processor subsystem
	 * is online. Therefore, the RCG can safely switch its parent.
	 */
	clk_rcg2_set_force_enable(hw);

	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
		     rcg->safe_src_index << CFG_SRC_SEL_SHIFT);

	update_config(rcg);

	clk_rcg2_clear_force_enable(hw);

	/* Write back the stored configuration corresponding to current rate */
	regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
}
/*
 * Ops for shared RCGs (used by multiple masters): enable/disable park
 * the clock on a safe source instead of gating it outright.
 */
const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
	.set_rate_and_parent = clk_rcg2_shared_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);