clk-rcg.c 21 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894
  1. /*
  2. * Copyright (c) 2013, The Linux Foundation. All rights reserved.
  3. *
  4. * This software is licensed under the terms of the GNU General Public
  5. * License version 2, as published by the Free Software Foundation, and
  6. * may be copied, distributed, and modified under those terms.
  7. *
  8. * This program is distributed in the hope that it will be useful,
  9. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  10. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  11. * GNU General Public License for more details.
  12. */
  13. #include <linux/kernel.h>
  14. #include <linux/bitops.h>
  15. #include <linux/err.h>
  16. #include <linux/export.h>
  17. #include <linux/clk-provider.h>
  18. #include <linux/regmap.h>
  19. #include <asm/div64.h>
  20. #include "clk-rcg.h"
  21. #include "common.h"
  22. static u32 ns_to_src(struct src_sel *s, u32 ns)
  23. {
  24. ns >>= s->src_sel_shift;
  25. ns &= SRC_SEL_MASK;
  26. return ns;
  27. }
  28. static u32 src_to_ns(struct src_sel *s, u8 src, u32 ns)
  29. {
  30. u32 mask;
  31. mask = SRC_SEL_MASK;
  32. mask <<= s->src_sel_shift;
  33. ns &= ~mask;
  34. ns |= src << s->src_sel_shift;
  35. return ns;
  36. }
  37. static u8 clk_rcg_get_parent(struct clk_hw *hw)
  38. {
  39. struct clk_rcg *rcg = to_clk_rcg(hw);
  40. int num_parents = clk_hw_get_num_parents(hw);
  41. u32 ns;
  42. int i, ret;
  43. ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
  44. if (ret)
  45. goto err;
  46. ns = ns_to_src(&rcg->s, ns);
  47. for (i = 0; i < num_parents; i++)
  48. if (ns == rcg->s.parent_map[i].cfg)
  49. return i;
  50. err:
  51. pr_debug("%s: Clock %s has invalid parent, using default.\n",
  52. __func__, clk_hw_get_name(hw));
  53. return 0;
  54. }
  55. static int reg_to_bank(struct clk_dyn_rcg *rcg, u32 bank)
  56. {
  57. bank &= BIT(rcg->mux_sel_bit);
  58. return !!bank;
  59. }
  60. static u8 clk_dyn_rcg_get_parent(struct clk_hw *hw)
  61. {
  62. struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
  63. int num_parents = clk_hw_get_num_parents(hw);
  64. u32 ns, reg;
  65. int bank;
  66. int i, ret;
  67. struct src_sel *s;
  68. ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
  69. if (ret)
  70. goto err;
  71. bank = reg_to_bank(rcg, reg);
  72. s = &rcg->s[bank];
  73. ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
  74. if (ret)
  75. goto err;
  76. ns = ns_to_src(s, ns);
  77. for (i = 0; i < num_parents; i++)
  78. if (ns == s->parent_map[i].cfg)
  79. return i;
  80. err:
  81. pr_debug("%s: Clock %s has invalid parent, using default.\n",
  82. __func__, clk_hw_get_name(hw));
  83. return 0;
  84. }
  85. static int clk_rcg_set_parent(struct clk_hw *hw, u8 index)
  86. {
  87. struct clk_rcg *rcg = to_clk_rcg(hw);
  88. u32 ns;
  89. regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
  90. ns = src_to_ns(&rcg->s, rcg->s.parent_map[index].cfg, ns);
  91. regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);
  92. return 0;
  93. }
  94. static u32 md_to_m(struct mn *mn, u32 md)
  95. {
  96. md >>= mn->m_val_shift;
  97. md &= BIT(mn->width) - 1;
  98. return md;
  99. }
  100. static u32 ns_to_pre_div(struct pre_div *p, u32 ns)
  101. {
  102. ns >>= p->pre_div_shift;
  103. ns &= BIT(p->pre_div_width) - 1;
  104. return ns;
  105. }
  106. static u32 pre_div_to_ns(struct pre_div *p, u8 pre_div, u32 ns)
  107. {
  108. u32 mask;
  109. mask = BIT(p->pre_div_width) - 1;
  110. mask <<= p->pre_div_shift;
  111. ns &= ~mask;
  112. ns |= pre_div << p->pre_div_shift;
  113. return ns;
  114. }
  115. static u32 mn_to_md(struct mn *mn, u32 m, u32 n, u32 md)
  116. {
  117. u32 mask, mask_w;
  118. mask_w = BIT(mn->width) - 1;
  119. mask = (mask_w << mn->m_val_shift) | mask_w;
  120. md &= ~mask;
  121. if (n) {
  122. m <<= mn->m_val_shift;
  123. md |= m;
  124. md |= ~n & mask_w;
  125. }
  126. return md;
  127. }
  128. static u32 ns_m_to_n(struct mn *mn, u32 ns, u32 m)
  129. {
  130. ns = ~ns >> mn->n_val_shift;
  131. ns &= BIT(mn->width) - 1;
  132. return ns + m;
  133. }
  134. static u32 reg_to_mnctr_mode(struct mn *mn, u32 val)
  135. {
  136. val >>= mn->mnctr_mode_shift;
  137. val &= MNCTR_MODE_MASK;
  138. return val;
  139. }
  140. static u32 mn_to_ns(struct mn *mn, u32 m, u32 n, u32 ns)
  141. {
  142. u32 mask;
  143. mask = BIT(mn->width) - 1;
  144. mask <<= mn->n_val_shift;
  145. ns &= ~mask;
  146. if (n) {
  147. n = n - m;
  148. n = ~n;
  149. n &= BIT(mn->width) - 1;
  150. n <<= mn->n_val_shift;
  151. ns |= n;
  152. }
  153. return ns;
  154. }
  155. static u32 mn_to_reg(struct mn *mn, u32 m, u32 n, u32 val)
  156. {
  157. u32 mask;
  158. mask = MNCTR_MODE_MASK << mn->mnctr_mode_shift;
  159. mask |= BIT(mn->mnctr_en_bit);
  160. val &= ~mask;
  161. if (n) {
  162. val |= BIT(mn->mnctr_en_bit);
  163. val |= MNCTR_MODE_DUAL << mn->mnctr_mode_shift;
  164. }
  165. return val;
  166. }
/*
 * Program one bank of a banked (dynamic) RCG with the settings in @f and
 * switch the glitch-free mux over to it.
 *
 * If the clock is enabled, the currently *inactive* bank is programmed and
 * the mux is toggled at the end, so the running clock never sees a
 * half-programmed divider.  If the clock is disabled, the active bank is
 * reprogrammed in place and no mux toggle is needed.
 *
 * Returns 0 on success or a negative errno from a regmap access or the
 * source-index lookup.
 */
static int configure_bank(struct clk_dyn_rcg *rcg, const struct freq_tbl *f)
{
	u32 ns, md, reg;
	int bank, new_bank, ret, index;
	struct mn *mn;
	struct pre_div *p;
	struct src_sel *s;
	bool enabled;
	u32 md_reg, ns_reg;
	bool banked_mn = !!rcg->mn[1].width;
	bool banked_p = !!rcg->p[1].pre_div_width;
	struct clk_hw *hw = &rcg->clkr.hw;

	enabled = __clk_is_enabled(hw->clk);

	ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	if (ret)
		return ret;
	bank = reg_to_bank(rcg, reg);
	/* Program the inactive bank while running; the active one if not */
	new_bank = enabled ? !bank : bank;

	ns_reg = rcg->ns_reg[new_bank];
	ret = regmap_read(rcg->clkr.regmap, ns_reg, &ns);
	if (ret)
		return ret;

	if (banked_mn) {
		mn = &rcg->mn[new_bank];
		md_reg = rcg->md_reg[new_bank];

		/* Hold the M/N counter in reset while it is reprogrammed */
		ns |= BIT(mn->mnctr_reset_bit);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;

		ret = regmap_read(rcg->clkr.regmap, md_reg, &md);
		if (ret)
			return ret;
		md = mn_to_md(mn, f->m, f->n, md);
		ret = regmap_write(rcg->clkr.regmap, md_reg, md);
		if (ret)
			return ret;
		ns = mn_to_ns(mn, f->m, f->n, ns);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;

		/* Two NS registers means mode control is in NS register */
		if (rcg->ns_reg[0] != rcg->ns_reg[1]) {
			ns = mn_to_reg(mn, f->m, f->n, ns);
			ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
			if (ret)
				return ret;
		} else {
			/* Otherwise the mode bits live in the bank register */
			reg = mn_to_reg(mn, f->m, f->n, reg);
			ret = regmap_write(rcg->clkr.regmap, rcg->bank_reg,
					   reg);
			if (ret)
				return ret;
		}

		/* Release the M/N counter from reset */
		ns &= ~BIT(mn->mnctr_reset_bit);
		ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
		if (ret)
			return ret;
	}

	if (banked_p) {
		p = &rcg->p[new_bank];
		/* Hardware field holds pre_div - 1 */
		ns = pre_div_to_ns(p, f->pre_div - 1, ns);
	}

	s = &rcg->s[new_bank];
	index = qcom_find_src_index(hw, s->parent_map, f->src);
	if (index < 0)
		return index;
	ns = src_to_ns(s, s->parent_map[index].cfg, ns);
	ret = regmap_write(rcg->clkr.regmap, ns_reg, ns);
	if (ret)
		return ret;

	if (enabled) {
		/* Flip the mux over to the freshly programmed bank */
		ret = regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
		if (ret)
			return ret;
		reg ^= BIT(rcg->mux_sel_bit);
		ret = regmap_write(rcg->clkr.regmap, rcg->bank_reg, reg);
		if (ret)
			return ret;
	}

	return 0;
}
/*
 * Switch a banked RCG to parent @index while preserving its current rate.
 *
 * The active bank's M/N and pre-divider settings are read back into a
 * freq_tbl so that configure_bank() reprograms only the source.
 *
 * NOTE(review): the regmap_read() return values are ignored here; on a
 * failed read a stale/uninitialized register value would be reused —
 * confirm this is acceptable, or mirror the error handling used in
 * configure_bank().
 */
static int clk_dyn_rcg_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
	u32 ns, md, reg;
	int bank;
	struct freq_tbl f = { 0 };
	bool banked_mn = !!rcg->mn[1].width;
	bool banked_p = !!rcg->p[1].pre_div_width;

	regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
	bank = reg_to_bank(rcg, reg);
	regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);

	if (banked_mn) {
		regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
		f.m = md_to_m(&rcg->mn[bank], md);
		f.n = ns_m_to_n(&rcg->mn[bank], ns, f.m);
	}

	if (banked_p)
		f.pre_div = ns_to_pre_div(&rcg->p[bank], ns) + 1;

	/* presumably maps the parent index to a src identifier for
	 * configure_bank()'s reverse lookup — verify against
	 * qcom_find_src_index() semantics in common.c */
	f.src = qcom_find_src_index(hw, rcg->s[bank].parent_map, index);
	return configure_bank(rcg, &f);
}
  269. /*
  270. * Calculate m/n:d rate
  271. *
  272. * parent_rate m
  273. * rate = ----------- x ---
  274. * pre_div n
  275. */
  276. static unsigned long
  277. calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 pre_div)
  278. {
  279. if (pre_div)
  280. rate /= pre_div + 1;
  281. if (mode) {
  282. u64 tmp = rate;
  283. tmp *= m;
  284. do_div(tmp, n);
  285. rate = tmp;
  286. }
  287. return rate;
  288. }
  289. static unsigned long
  290. clk_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
  291. {
  292. struct clk_rcg *rcg = to_clk_rcg(hw);
  293. u32 pre_div, m = 0, n = 0, ns, md, mode = 0;
  294. struct mn *mn = &rcg->mn;
  295. regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
  296. pre_div = ns_to_pre_div(&rcg->p, ns);
  297. if (rcg->mn.width) {
  298. regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
  299. m = md_to_m(mn, md);
  300. n = ns_m_to_n(mn, ns, m);
  301. /* MN counter mode is in hw.enable_reg sometimes */
  302. if (rcg->clkr.enable_reg != rcg->ns_reg)
  303. regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &mode);
  304. else
  305. mode = ns;
  306. mode = reg_to_mnctr_mode(mn, mode);
  307. }
  308. return calc_rate(parent_rate, m, n, mode, pre_div);
  309. }
  310. static unsigned long
  311. clk_dyn_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
  312. {
  313. struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
  314. u32 m, n, pre_div, ns, md, mode, reg;
  315. int bank;
  316. struct mn *mn;
  317. bool banked_p = !!rcg->p[1].pre_div_width;
  318. bool banked_mn = !!rcg->mn[1].width;
  319. regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
  320. bank = reg_to_bank(rcg, reg);
  321. regmap_read(rcg->clkr.regmap, rcg->ns_reg[bank], &ns);
  322. m = n = pre_div = mode = 0;
  323. if (banked_mn) {
  324. mn = &rcg->mn[bank];
  325. regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
  326. m = md_to_m(mn, md);
  327. n = ns_m_to_n(mn, ns, m);
  328. /* Two NS registers means mode control is in NS register */
  329. if (rcg->ns_reg[0] != rcg->ns_reg[1])
  330. reg = ns;
  331. mode = reg_to_mnctr_mode(mn, reg);
  332. }
  333. if (banked_p)
  334. pre_div = ns_to_pre_div(&rcg->p[bank], ns);
  335. return calc_rate(parent_rate, m, n, mode, pre_div);
  336. }
  337. static int _freq_tbl_determine_rate(struct clk_hw *hw, const struct freq_tbl *f,
  338. struct clk_rate_request *req,
  339. const struct parent_map *parent_map)
  340. {
  341. unsigned long clk_flags, rate = req->rate;
  342. struct clk_hw *p;
  343. int index;
  344. f = qcom_find_freq(f, rate);
  345. if (!f)
  346. return -EINVAL;
  347. index = qcom_find_src_index(hw, parent_map, f->src);
  348. if (index < 0)
  349. return index;
  350. clk_flags = clk_hw_get_flags(hw);
  351. p = clk_hw_get_parent_by_index(hw, index);
  352. if (clk_flags & CLK_SET_RATE_PARENT) {
  353. rate = rate * f->pre_div;
  354. if (f->n) {
  355. u64 tmp = rate;
  356. tmp = tmp * f->n;
  357. do_div(tmp, f->m);
  358. rate = tmp;
  359. }
  360. } else {
  361. rate = clk_hw_get_rate(p);
  362. }
  363. req->best_parent_hw = p;
  364. req->best_parent_rate = rate;
  365. req->rate = f->freq;
  366. return 0;
  367. }
  368. static int clk_rcg_determine_rate(struct clk_hw *hw,
  369. struct clk_rate_request *req)
  370. {
  371. struct clk_rcg *rcg = to_clk_rcg(hw);
  372. return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req,
  373. rcg->s.parent_map);
  374. }
  375. static int clk_dyn_rcg_determine_rate(struct clk_hw *hw,
  376. struct clk_rate_request *req)
  377. {
  378. struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
  379. u32 reg;
  380. int bank;
  381. struct src_sel *s;
  382. regmap_read(rcg->clkr.regmap, rcg->bank_reg, &reg);
  383. bank = reg_to_bank(rcg, reg);
  384. s = &rcg->s[bank];
  385. return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, s->parent_map);
  386. }
  387. static int clk_rcg_bypass_determine_rate(struct clk_hw *hw,
  388. struct clk_rate_request *req)
  389. {
  390. struct clk_rcg *rcg = to_clk_rcg(hw);
  391. const struct freq_tbl *f = rcg->freq_tbl;
  392. struct clk_hw *p;
  393. int index = qcom_find_src_index(hw, rcg->s.parent_map, f->src);
  394. req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
  395. req->best_parent_rate = clk_hw_round_rate(p, req->rate);
  396. req->rate = req->best_parent_rate;
  397. return 0;
  398. }
/*
 * Program the M/N counter and pre-divider of a non-banked RCG from @f.
 *
 * When an M/N counter is present it is held in reset (via reset_reg,
 * which lives in the clock-controller enable register when
 * mn.reset_in_cc is set, otherwise in NS) while MD and NS are updated,
 * and released afterwards.  Always returns 0; regmap errors are not
 * propagated here.
 */
static int __clk_rcg_set_rate(struct clk_rcg *rcg, const struct freq_tbl *f)
{
	u32 ns, md, ctl;
	struct mn *mn = &rcg->mn;
	u32 mask = 0;
	unsigned int reset_reg;

	if (rcg->mn.reset_in_cc)
		reset_reg = rcg->clkr.enable_reg;
	else
		reset_reg = rcg->ns_reg;

	if (rcg->mn.width) {
		mask = BIT(mn->mnctr_reset_bit);
		/* Assert the M/N counter reset while reprogramming */
		regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, mask);

		regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
		md = mn_to_md(mn, f->m, f->n, md);
		regmap_write(rcg->clkr.regmap, rcg->md_reg, md);

		regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
		/* MN counter mode is in hw.enable_reg sometimes */
		if (rcg->clkr.enable_reg != rcg->ns_reg) {
			regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &ctl);
			ctl = mn_to_reg(mn, f->m, f->n, ctl);
			regmap_write(rcg->clkr.regmap, rcg->clkr.enable_reg, ctl);
		} else {
			ns = mn_to_reg(mn, f->m, f->n, ns);
		}
		ns = mn_to_ns(mn, f->m, f->n, ns);
	} else {
		regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	}

	/* Hardware field holds pre_div - 1 */
	ns = pre_div_to_ns(&rcg->p, f->pre_div - 1, ns);
	regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);

	/* Deassert the reset (no-op when there is no M/N counter, mask == 0) */
	regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, 0);

	return 0;
}
  433. static int clk_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
  434. unsigned long parent_rate)
  435. {
  436. struct clk_rcg *rcg = to_clk_rcg(hw);
  437. const struct freq_tbl *f;
  438. f = qcom_find_freq(rcg->freq_tbl, rate);
  439. if (!f)
  440. return -EINVAL;
  441. return __clk_rcg_set_rate(rcg, f);
  442. }
  443. static int clk_rcg_bypass_set_rate(struct clk_hw *hw, unsigned long rate,
  444. unsigned long parent_rate)
  445. {
  446. struct clk_rcg *rcg = to_clk_rcg(hw);
  447. return __clk_rcg_set_rate(rcg, rcg->freq_tbl);
  448. }
  449. static int clk_rcg_bypass2_determine_rate(struct clk_hw *hw,
  450. struct clk_rate_request *req)
  451. {
  452. struct clk_hw *p;
  453. p = req->best_parent_hw;
  454. req->best_parent_rate = clk_hw_round_rate(p, req->rate);
  455. req->rate = req->best_parent_rate;
  456. return 0;
  457. }
  458. static int clk_rcg_bypass2_set_rate(struct clk_hw *hw, unsigned long rate,
  459. unsigned long parent_rate)
  460. {
  461. struct clk_rcg *rcg = to_clk_rcg(hw);
  462. struct freq_tbl f = { 0 };
  463. u32 ns, src;
  464. int i, ret, num_parents = clk_hw_get_num_parents(hw);
  465. ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
  466. if (ret)
  467. return ret;
  468. src = ns_to_src(&rcg->s, ns);
  469. f.pre_div = ns_to_pre_div(&rcg->p, ns) + 1;
  470. for (i = 0; i < num_parents; i++) {
  471. if (src == rcg->s.parent_map[i].cfg) {
  472. f.src = rcg->s.parent_map[i].src;
  473. return __clk_rcg_set_rate(rcg, &f);
  474. }
  475. }
  476. return -EINVAL;
  477. }
  478. static int clk_rcg_bypass2_set_rate_and_parent(struct clk_hw *hw,
  479. unsigned long rate, unsigned long parent_rate, u8 index)
  480. {
  481. /* Read the hardware to determine parent during set_rate */
  482. return clk_rcg_bypass2_set_rate(hw, rate, parent_rate);
  483. }
/* A fractional rate ratio: rate = parent_rate * num / den. */
struct frac_entry {
	int num;	/* numerator (maps to M) */
	int den;	/* denominator (maps to N) */
};
/* Candidate M/N ratios tried for pixel clocks; zero-terminated. */
static const struct frac_entry pixel_table[] = {
	{ 1, 2 },
	{ 1, 3 },
	{ 3, 16 },
	{ }
};
  494. static int clk_rcg_pixel_determine_rate(struct clk_hw *hw,
  495. struct clk_rate_request *req)
  496. {
  497. int delta = 100000;
  498. const struct frac_entry *frac = pixel_table;
  499. unsigned long request, src_rate;
  500. for (; frac->num; frac++) {
  501. request = (req->rate * frac->den) / frac->num;
  502. src_rate = clk_hw_round_rate(req->best_parent_hw, request);
  503. if ((src_rate < (request - delta)) ||
  504. (src_rate > (request + delta)))
  505. continue;
  506. req->best_parent_rate = src_rate;
  507. req->rate = (src_rate * frac->num) / frac->den;
  508. return 0;
  509. }
  510. return -EINVAL;
  511. }
/*
 * set_rate for pixel clocks: keep the current source and pre-divider,
 * then pick the first pixel_table ratio whose implied parent rate matches
 * @parent_rate within a 100 kHz tolerance and program it as M/N.
 */
static int clk_rcg_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg *rcg = to_clk_rcg(hw);
	int delta = 100000;	/* allowed deviation from the ideal rate, Hz */
	const struct frac_entry *frac = pixel_table;
	unsigned long request;
	struct freq_tbl f = { 0 };
	u32 ns, src;
	int i, ret, num_parents = clk_hw_get_num_parents(hw);

	ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
	if (ret)
		return ret;

	/* Preserve the currently programmed source and pre-divider */
	src = ns_to_src(&rcg->s, ns);
	f.pre_div = ns_to_pre_div(&rcg->p, ns) + 1;

	for (i = 0; i < num_parents; i++) {
		if (src == rcg->s.parent_map[i].cfg) {
			f.src = rcg->s.parent_map[i].src;
			break;
		}
	}
	/* NOTE(review): if no parent_map entry matches, f.src stays 0 */

	/* let us find appropriate m/n values for this */
	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
		    (parent_rate > (request + delta)))
			continue;

		f.m = frac->num;
		f.n = frac->den;

		return __clk_rcg_set_rate(rcg, &f);
	}

	return -EINVAL;
}
  545. static int clk_rcg_pixel_set_rate_and_parent(struct clk_hw *hw,
  546. unsigned long rate, unsigned long parent_rate, u8 index)
  547. {
  548. return clk_rcg_pixel_set_rate(hw, rate, parent_rate);
  549. }
  550. static int clk_rcg_esc_determine_rate(struct clk_hw *hw,
  551. struct clk_rate_request *req)
  552. {
  553. struct clk_rcg *rcg = to_clk_rcg(hw);
  554. int pre_div_max = BIT(rcg->p.pre_div_width);
  555. int div;
  556. unsigned long src_rate;
  557. if (req->rate == 0)
  558. return -EINVAL;
  559. src_rate = clk_hw_get_rate(req->best_parent_hw);
  560. div = src_rate / req->rate;
  561. if (div >= 1 && div <= pre_div_max) {
  562. req->best_parent_rate = src_rate;
  563. req->rate = src_rate / div;
  564. return 0;
  565. }
  566. return -EINVAL;
  567. }
  568. static int clk_rcg_esc_set_rate(struct clk_hw *hw, unsigned long rate,
  569. unsigned long parent_rate)
  570. {
  571. struct clk_rcg *rcg = to_clk_rcg(hw);
  572. struct freq_tbl f = { 0 };
  573. int pre_div_max = BIT(rcg->p.pre_div_width);
  574. int div;
  575. u32 ns;
  576. int i, ret, num_parents = clk_hw_get_num_parents(hw);
  577. if (rate == 0)
  578. return -EINVAL;
  579. ret = regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
  580. if (ret)
  581. return ret;
  582. ns = ns_to_src(&rcg->s, ns);
  583. for (i = 0; i < num_parents; i++) {
  584. if (ns == rcg->s.parent_map[i].cfg) {
  585. f.src = rcg->s.parent_map[i].src;
  586. break;
  587. }
  588. }
  589. div = parent_rate / rate;
  590. if (div >= 1 && div <= pre_div_max) {
  591. f.pre_div = div;
  592. return __clk_rcg_set_rate(rcg, &f);
  593. }
  594. return -EINVAL;
  595. }
  596. static int clk_rcg_esc_set_rate_and_parent(struct clk_hw *hw,
  597. unsigned long rate, unsigned long parent_rate, u8 index)
  598. {
  599. return clk_rcg_esc_set_rate(hw, rate, parent_rate);
  600. }
  601. /*
  602. * This type of clock has a glitch-free mux that switches between the output of
  603. * the M/N counter and an always on clock source (XO). When clk_set_rate() is
  604. * called we need to make sure that we don't switch to the M/N counter if it
  605. * isn't clocking because the mux will get stuck and the clock will stop
  606. * outputting a clock. This can happen if the framework isn't aware that this
  607. * clock is on and so clk_set_rate() doesn't turn on the new parent. To fix
  608. * this we switch the mux in the enable/disable ops and reprogram the M/N
  609. * counter in the set_rate op. We also make sure to switch away from the M/N
  610. * counter in set_rate if software thinks the clock is off.
  611. */
  612. static int clk_rcg_lcc_set_rate(struct clk_hw *hw, unsigned long rate,
  613. unsigned long parent_rate)
  614. {
  615. struct clk_rcg *rcg = to_clk_rcg(hw);
  616. const struct freq_tbl *f;
  617. int ret;
  618. u32 gfm = BIT(10);
  619. f = qcom_find_freq(rcg->freq_tbl, rate);
  620. if (!f)
  621. return -EINVAL;
  622. /* Switch to XO to avoid glitches */
  623. regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, 0);
  624. ret = __clk_rcg_set_rate(rcg, f);
  625. /* Switch back to M/N if it's clocking */
  626. if (__clk_is_enabled(hw->clk))
  627. regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, gfm);
  628. return ret;
  629. }
  630. static int clk_rcg_lcc_enable(struct clk_hw *hw)
  631. {
  632. struct clk_rcg *rcg = to_clk_rcg(hw);
  633. u32 gfm = BIT(10);
  634. /* Use M/N */
  635. return regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, gfm);
  636. }
  637. static void clk_rcg_lcc_disable(struct clk_hw *hw)
  638. {
  639. struct clk_rcg *rcg = to_clk_rcg(hw);
  640. u32 gfm = BIT(10);
  641. /* Use XO */
  642. regmap_update_bits(rcg->clkr.regmap, rcg->ns_reg, gfm, 0);
  643. }
  644. static int __clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate)
  645. {
  646. struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
  647. const struct freq_tbl *f;
  648. f = qcom_find_freq(rcg->freq_tbl, rate);
  649. if (!f)
  650. return -EINVAL;
  651. return configure_bank(rcg, f);
  652. }
  653. static int clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
  654. unsigned long parent_rate)
  655. {
  656. return __clk_dyn_rcg_set_rate(hw, rate);
  657. }
  658. static int clk_dyn_rcg_set_rate_and_parent(struct clk_hw *hw,
  659. unsigned long rate, unsigned long parent_rate, u8 index)
  660. {
  661. return __clk_dyn_rcg_set_rate(hw, rate);
  662. }
/* Plain RCG: freq_tbl-driven rate, source selected via NS register. */
const struct clk_ops clk_rcg_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_ops);

/* Bypass RCG: always programs the single freq_tbl entry. */
const struct clk_ops clk_rcg_bypass_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_bypass_determine_rate,
	.set_rate = clk_rcg_bypass_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_bypass_ops);

/* Bypass2 RCG: rate follows the parent; source re-read from hardware. */
const struct clk_ops clk_rcg_bypass2_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_bypass2_determine_rate,
	.set_rate = clk_rcg_bypass2_set_rate,
	.set_rate_and_parent = clk_rcg_bypass2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg_bypass2_ops);

/* Pixel RCG: M/N ratio chosen from pixel_table. */
const struct clk_ops clk_rcg_pixel_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_pixel_determine_rate,
	.set_rate = clk_rcg_pixel_set_rate,
	.set_rate_and_parent = clk_rcg_pixel_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg_pixel_ops);

/* Esc RCG: plain integer pre-divider of the parent rate. */
const struct clk_ops clk_rcg_esc_ops = {
	.enable = clk_enable_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_esc_determine_rate,
	.set_rate = clk_rcg_esc_set_rate,
	.set_rate_and_parent = clk_rcg_esc_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg_esc_ops);

/* LCC RCG: enable/disable toggle the glitch-free mux (see comment above). */
const struct clk_ops clk_rcg_lcc_ops = {
	.enable = clk_rcg_lcc_enable,
	.disable = clk_rcg_lcc_disable,
	.get_parent = clk_rcg_get_parent,
	.set_parent = clk_rcg_set_parent,
	.recalc_rate = clk_rcg_recalc_rate,
	.determine_rate = clk_rcg_determine_rate,
	.set_rate = clk_rcg_lcc_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg_lcc_ops);

/* Banked (dynamic) RCG: two register banks swapped via glitch-free mux. */
const struct clk_ops clk_dyn_rcg_ops = {
	.enable = clk_enable_regmap,
	.is_enabled = clk_is_enabled_regmap,
	.disable = clk_disable_regmap,
	.get_parent = clk_dyn_rcg_get_parent,
	.set_parent = clk_dyn_rcg_set_parent,
	.recalc_rate = clk_dyn_rcg_recalc_rate,
	.determine_rate = clk_dyn_rcg_determine_rate,
	.set_rate = clk_dyn_rcg_set_rate,
	.set_rate_and_parent = clk_dyn_rcg_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_dyn_rcg_ops);