clk-pll.c

/*
 * Copyright (c) 2012, 2013, NVIDIA CORPORATION. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/slab.h>
#include <linux/io.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/clk-provider.h>

#include "clk.h"

#define PLL_BASE_BYPASS BIT(31)
#define PLL_BASE_ENABLE BIT(30)
#define PLL_BASE_REF_ENABLE BIT(29)
#define PLL_BASE_OVERRIDE BIT(28)

#define PLL_BASE_DIVP_SHIFT 20
#define PLL_BASE_DIVP_WIDTH 3
#define PLL_BASE_DIVN_SHIFT 8
#define PLL_BASE_DIVN_WIDTH 10
#define PLL_BASE_DIVM_SHIFT 0
#define PLL_BASE_DIVM_WIDTH 5
#define PLLU_POST_DIVP_MASK 0x1

#define PLL_MISC_DCCON_SHIFT 20
#define PLL_MISC_CPCON_SHIFT 8
#define PLL_MISC_CPCON_WIDTH 4
#define PLL_MISC_CPCON_MASK ((1 << PLL_MISC_CPCON_WIDTH) - 1)
#define PLL_MISC_LFCON_SHIFT 4
#define PLL_MISC_LFCON_WIDTH 4
#define PLL_MISC_LFCON_MASK ((1 << PLL_MISC_LFCON_WIDTH) - 1)
#define PLL_MISC_VCOCON_SHIFT 0
#define PLL_MISC_VCOCON_WIDTH 4
#define PLL_MISC_VCOCON_MASK ((1 << PLL_MISC_VCOCON_WIDTH) - 1)

#define OUT_OF_TABLE_CPCON 8

#define PMC_PLLP_WB0_OVERRIDE 0xf8
#define PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE BIT(12)
#define PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE BIT(11)

#define PLL_POST_LOCK_DELAY 50

#define PLLDU_LFCON_SET_DIVN 600

#define PLLE_BASE_DIVCML_SHIFT 24
#define PLLE_BASE_DIVCML_MASK 0xf
#define PLLE_BASE_DIVP_SHIFT 16
#define PLLE_BASE_DIVP_WIDTH 6
#define PLLE_BASE_DIVN_SHIFT 8
#define PLLE_BASE_DIVN_WIDTH 8
#define PLLE_BASE_DIVM_SHIFT 0
#define PLLE_BASE_DIVM_WIDTH 8

#define PLLE_MISC_SETUP_BASE_SHIFT 16
#define PLLE_MISC_SETUP_BASE_MASK (0xffff << PLLE_MISC_SETUP_BASE_SHIFT)
#define PLLE_MISC_LOCK_ENABLE BIT(9)
#define PLLE_MISC_READY BIT(15)
#define PLLE_MISC_SETUP_EX_SHIFT 2
#define PLLE_MISC_SETUP_EX_MASK (3 << PLLE_MISC_SETUP_EX_SHIFT)
#define PLLE_MISC_SETUP_MASK (PLLE_MISC_SETUP_BASE_MASK | \
			      PLLE_MISC_SETUP_EX_MASK)
#define PLLE_MISC_SETUP_VALUE (7 << PLLE_MISC_SETUP_BASE_SHIFT)

#define PLLE_SS_CTRL 0x68
#define PLLE_SS_CNTL_BYPASS_SS BIT(10)
#define PLLE_SS_CNTL_INTERP_RESET BIT(11)
#define PLLE_SS_CNTL_SSC_BYP BIT(12)
#define PLLE_SS_CNTL_CENTER BIT(14)
#define PLLE_SS_CNTL_INVERT BIT(15)
#define PLLE_SS_DISABLE (PLLE_SS_CNTL_BYPASS_SS | PLLE_SS_CNTL_INTERP_RESET |\
			 PLLE_SS_CNTL_SSC_BYP)
#define PLLE_SS_MAX_MASK 0x1ff
#define PLLE_SS_MAX_VAL 0x25
#define PLLE_SS_INC_MASK (0xff << 16)
#define PLLE_SS_INC_VAL (0x1 << 16)
#define PLLE_SS_INCINTRV_MASK (0x3f << 24)
#define PLLE_SS_INCINTRV_VAL (0x20 << 24)
#define PLLE_SS_COEFFICIENTS_MASK \
	(PLLE_SS_MAX_MASK | PLLE_SS_INC_MASK | PLLE_SS_INCINTRV_MASK)
#define PLLE_SS_COEFFICIENTS_VAL \
	(PLLE_SS_MAX_VAL | PLLE_SS_INC_VAL | PLLE_SS_INCINTRV_VAL)

#define PLLE_AUX_PLLP_SEL BIT(2)
#define PLLE_AUX_USE_LOCKDET BIT(3)
#define PLLE_AUX_ENABLE_SWCTL BIT(4)
#define PLLE_AUX_SS_SWCTL BIT(6)
#define PLLE_AUX_SEQ_ENABLE BIT(24)
#define PLLE_AUX_SEQ_START_STATE BIT(25)
#define PLLE_AUX_PLLRE_SEL BIT(28)

#define XUSBIO_PLL_CFG0 0x51c
#define XUSBIO_PLL_CFG0_PADPLL_RESET_SWCTL BIT(0)
#define XUSBIO_PLL_CFG0_CLK_ENABLE_SWCTL BIT(2)
#define XUSBIO_PLL_CFG0_PADPLL_USE_LOCKDET BIT(6)
#define XUSBIO_PLL_CFG0_SEQ_ENABLE BIT(24)
#define XUSBIO_PLL_CFG0_SEQ_START_STATE BIT(25)

#define SATA_PLL_CFG0 0x490
#define SATA_PLL_CFG0_PADPLL_RESET_SWCTL BIT(0)
#define SATA_PLL_CFG0_PADPLL_USE_LOCKDET BIT(2)
#define SATA_PLL_CFG0_SEQ_ENABLE BIT(24)
#define SATA_PLL_CFG0_SEQ_START_STATE BIT(25)

#define PLLE_MISC_PLLE_PTS BIT(8)
#define PLLE_MISC_IDDQ_SW_VALUE BIT(13)
#define PLLE_MISC_IDDQ_SW_CTRL BIT(14)
#define PLLE_MISC_VREG_BG_CTRL_SHIFT 4
#define PLLE_MISC_VREG_BG_CTRL_MASK (3 << PLLE_MISC_VREG_BG_CTRL_SHIFT)
#define PLLE_MISC_VREG_CTRL_SHIFT 2
#define PLLE_MISC_VREG_CTRL_MASK (2 << PLLE_MISC_VREG_CTRL_SHIFT)

#define PLLCX_MISC_STROBE BIT(31)
#define PLLCX_MISC_RESET BIT(30)
#define PLLCX_MISC_SDM_DIV_SHIFT 28
#define PLLCX_MISC_SDM_DIV_MASK (0x3 << PLLCX_MISC_SDM_DIV_SHIFT)
#define PLLCX_MISC_FILT_DIV_SHIFT 26
#define PLLCX_MISC_FILT_DIV_MASK (0x3 << PLLCX_MISC_FILT_DIV_SHIFT)
#define PLLCX_MISC_ALPHA_SHIFT 18
#define PLLCX_MISC_DIV_LOW_RANGE \
	((0x1 << PLLCX_MISC_SDM_DIV_SHIFT) | \
	 (0x1 << PLLCX_MISC_FILT_DIV_SHIFT))
#define PLLCX_MISC_DIV_HIGH_RANGE \
	((0x2 << PLLCX_MISC_SDM_DIV_SHIFT) | \
	 (0x2 << PLLCX_MISC_FILT_DIV_SHIFT))
#define PLLCX_MISC_COEF_LOW_RANGE \
	((0x14 << PLLCX_MISC_KA_SHIFT) | (0x38 << PLLCX_MISC_KB_SHIFT))
#define PLLCX_MISC_KA_SHIFT 2
#define PLLCX_MISC_KB_SHIFT 9
#define PLLCX_MISC_DEFAULT (PLLCX_MISC_COEF_LOW_RANGE | \
			    (0x19 << PLLCX_MISC_ALPHA_SHIFT) | \
			    PLLCX_MISC_DIV_LOW_RANGE | \
			    PLLCX_MISC_RESET)
#define PLLCX_MISC1_DEFAULT 0x000d2308
#define PLLCX_MISC2_DEFAULT 0x30211200
#define PLLCX_MISC3_DEFAULT 0x200

#define PMC_SATA_PWRGT 0x1ac
#define PMC_SATA_PWRGT_PLLE_IDDQ_VALUE BIT(5)
#define PMC_SATA_PWRGT_PLLE_IDDQ_SWCTL BIT(4)

#define PLLSS_MISC_KCP 0
#define PLLSS_MISC_KVCO 0
#define PLLSS_MISC_SETUP 0
#define PLLSS_EN_SDM 0
#define PLLSS_EN_SSC 0
#define PLLSS_EN_DITHER2 0
#define PLLSS_EN_DITHER 1
#define PLLSS_SDM_RESET 0
#define PLLSS_CLAMP 0
#define PLLSS_SDM_SSC_MAX 0
#define PLLSS_SDM_SSC_MIN 0
#define PLLSS_SDM_SSC_STEP 0
#define PLLSS_SDM_DIN 0
#define PLLSS_MISC_DEFAULT ((PLLSS_MISC_KCP << 25) | \
			    (PLLSS_MISC_KVCO << 24) | \
			    PLLSS_MISC_SETUP)
#define PLLSS_CFG_DEFAULT ((PLLSS_EN_SDM << 31) | \
			   (PLLSS_EN_SSC << 30) | \
			   (PLLSS_EN_DITHER2 << 29) | \
			   (PLLSS_EN_DITHER << 28) | \
			   (PLLSS_SDM_RESET << 27) | \
			   (PLLSS_CLAMP << 22))
#define PLLSS_CTRL1_DEFAULT \
	((PLLSS_SDM_SSC_MAX << 16) | PLLSS_SDM_SSC_MIN)
#define PLLSS_CTRL2_DEFAULT \
	((PLLSS_SDM_SSC_STEP << 16) | PLLSS_SDM_DIN)
#define PLLSS_LOCK_OVERRIDE BIT(24)
#define PLLSS_REF_SRC_SEL_SHIFT 25
#define PLLSS_REF_SRC_SEL_MASK (3 << PLLSS_REF_SRC_SEL_SHIFT)

#define pll_readl(offset, p) readl_relaxed(p->clk_base + offset)
#define pll_readl_base(p) pll_readl(p->params->base_reg, p)
#define pll_readl_misc(p) pll_readl(p->params->misc_reg, p)
#define pll_override_readl(offset, p) readl_relaxed(p->pmc + offset)

#define pll_writel(val, offset, p) writel_relaxed(val, p->clk_base + offset)
#define pll_writel_base(val, p) pll_writel(val, p->params->base_reg, p)
#define pll_writel_misc(val, p) pll_writel(val, p->params->misc_reg, p)
#define pll_override_writel(val, offset, p) writel(val, p->pmc + offset)

#define mask(w) ((1 << (w)) - 1)
#define divm_mask(p) mask(p->params->div_nmp->divm_width)
#define divn_mask(p) mask(p->params->div_nmp->divn_width)
#define divp_mask(p) (p->params->flags & TEGRA_PLLU ? PLLU_POST_DIVP_MASK :\
		      mask(p->params->div_nmp->divp_width))

#define divm_shift(p) (p)->params->div_nmp->divm_shift
#define divn_shift(p) (p)->params->div_nmp->divn_shift
#define divp_shift(p) (p)->params->div_nmp->divp_shift

#define divm_mask_shifted(p) (divm_mask(p) << divm_shift(p))
#define divn_mask_shifted(p) (divn_mask(p) << divn_shift(p))
#define divp_mask_shifted(p) (divp_mask(p) << divp_shift(p))

#define divm_max(p) (divm_mask(p))
#define divn_max(p) (divn_mask(p))
#define divp_max(p) (1 << (divp_mask(p)))

static struct div_nmp default_nmp = {
	.divn_shift = PLL_BASE_DIVN_SHIFT,
	.divn_width = PLL_BASE_DIVN_WIDTH,
	.divm_shift = PLL_BASE_DIVM_SHIFT,
	.divm_width = PLL_BASE_DIVM_WIDTH,
	.divp_shift = PLL_BASE_DIVP_SHIFT,
	.divp_width = PLL_BASE_DIVP_WIDTH,
};
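
/*
 * Lock handling: clk_pll_enable_lock() sets the lock-enable bit in the MISC
 * register when the PLL supports it, and clk_pll_wait_for_lock() then polls
 * the lock bit in either the base or the MISC register (TEGRA_PLL_LOCK_MISC),
 * falling back to a fixed delay when TEGRA_PLL_USE_LOCK is not set.
 */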
static void clk_pll_enable_lock(struct tegra_clk_pll *pll)
{
	u32 val;

	if (!(pll->params->flags & TEGRA_PLL_USE_LOCK))
		return;

	if (!(pll->params->flags & TEGRA_PLL_HAS_LOCK_ENABLE))
		return;

	val = pll_readl_misc(pll);
	val |= BIT(pll->params->lock_enable_bit_idx);
	pll_writel_misc(val, pll);
}

static int clk_pll_wait_for_lock(struct tegra_clk_pll *pll)
{
	int i;
	u32 val, lock_mask;
	void __iomem *lock_addr;

	if (!(pll->params->flags & TEGRA_PLL_USE_LOCK)) {
		udelay(pll->params->lock_delay);
		return 0;
	}

	lock_addr = pll->clk_base;
	if (pll->params->flags & TEGRA_PLL_LOCK_MISC)
		lock_addr += pll->params->misc_reg;
	else
		lock_addr += pll->params->base_reg;

	lock_mask = pll->params->lock_mask;

	for (i = 0; i < pll->params->lock_delay; i++) {
		val = readl_relaxed(lock_addr);
		if ((val & lock_mask) == lock_mask) {
			udelay(PLL_POST_LOCK_DELAY);
			return 0;
		}
		udelay(2); /* timeout = 2 * lock time */
	}

	pr_err("%s: Timed out waiting for pll %s lock\n", __func__,
	       clk_hw_get_name(&pll->hw));

	return -1;
}

static int clk_pll_is_enabled(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val;

	if (pll->params->flags & TEGRA_PLLM) {
		val = readl_relaxed(pll->pmc + PMC_PLLP_WB0_OVERRIDE);
		if (val & PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE)
			return val & PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE ? 1 : 0;
	}

	val = pll_readl_base(pll);

	return val & PLL_BASE_ENABLE ? 1 : 0;
}

static void _clk_pll_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val;

	clk_pll_enable_lock(pll);

	val = pll_readl_base(pll);
	if (pll->params->flags & TEGRA_PLL_BYPASS)
		val &= ~PLL_BASE_BYPASS;
	val |= PLL_BASE_ENABLE;
	pll_writel_base(val, pll);

	if (pll->params->flags & TEGRA_PLLM) {
		val = readl_relaxed(pll->pmc + PMC_PLLP_WB0_OVERRIDE);
		val |= PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE;
		writel_relaxed(val, pll->pmc + PMC_PLLP_WB0_OVERRIDE);
	}
}

static void _clk_pll_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val;

	val = pll_readl_base(pll);
	if (pll->params->flags & TEGRA_PLL_BYPASS)
		val &= ~PLL_BASE_BYPASS;
	val &= ~PLL_BASE_ENABLE;
	pll_writel_base(val, pll);

	if (pll->params->flags & TEGRA_PLLM) {
		val = readl_relaxed(pll->pmc + PMC_PLLP_WB0_OVERRIDE);
		val &= ~PMC_PLLP_WB0_OVERRIDE_PLLM_ENABLE;
		writel_relaxed(val, pll->pmc + PMC_PLLP_WB0_OVERRIDE);
	}
}

static int clk_pll_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	int ret;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pll_enable(hw);

	ret = clk_pll_wait_for_lock(pll);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static void clk_pll_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pll_disable(hw);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);
}

static int _p_div_to_hw(struct clk_hw *hw, u8 p_div)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct pdiv_map *p_tohw = pll->params->pdiv_tohw;

	if (p_tohw) {
		while (p_tohw->pdiv) {
			if (p_div <= p_tohw->pdiv)
				return p_tohw->hw_val;
			p_tohw++;
		}
		return -EINVAL;
	}
	return -EINVAL;
}

static int _hw_to_p_div(struct clk_hw *hw, u8 p_div_hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct pdiv_map *p_tohw = pll->params->pdiv_tohw;

	if (p_tohw) {
		while (p_tohw->pdiv) {
			if (p_div_hw == p_tohw->hw_val)
				return p_tohw->pdiv;
			p_tohw++;
		}
		return -EINVAL;
	}

	return 1 << p_div_hw;
}

static int _get_table_rate(struct clk_hw *hw,
			   struct tegra_clk_pll_freq_table *cfg,
			   unsigned long rate, unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table *sel;

	for (sel = pll->params->freq_table; sel->input_rate != 0; sel++)
		if (sel->input_rate == parent_rate &&
		    sel->output_rate == rate)
			break;

	if (sel->input_rate == 0)
		return -EINVAL;

	cfg->input_rate = sel->input_rate;
	cfg->output_rate = sel->output_rate;
	cfg->m = sel->m;
	cfg->n = sel->n;
	cfg->p = sel->p;
	cfg->cpcon = sel->cpcon;

	return 0;
}

static int _calc_rate(struct clk_hw *hw, struct tegra_clk_pll_freq_table *cfg,
		      unsigned long rate, unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long cfreq;
	u32 p_div = 0;
	int ret;

	switch (parent_rate) {
	case 12000000:
	case 26000000:
		cfreq = (rate <= 1000000 * 1000) ? 1000000 : 2000000;
		break;
	case 13000000:
		cfreq = (rate <= 1000000 * 1000) ? 1000000 : 2600000;
		break;
	case 16800000:
	case 19200000:
		cfreq = (rate <= 1200000 * 1000) ? 1200000 : 2400000;
		break;
	case 9600000:
	case 28800000:
		/*
		 * PLL_P_OUT1 rate is not listed in PLLA table
		 */
		cfreq = parent_rate / (parent_rate / 1000000);
		break;
	default:
		pr_err("%s Unexpected reference rate %lu\n",
		       __func__, parent_rate);
		BUG();
	}

	/* Raise VCO to guarantee 0.5% accuracy */
	for (cfg->output_rate = rate; cfg->output_rate < 200 * cfreq;
	     cfg->output_rate <<= 1)
		p_div++;

	cfg->m = parent_rate / cfreq;
	cfg->n = cfg->output_rate / cfreq;
	cfg->cpcon = OUT_OF_TABLE_CPCON;

	if (cfg->m > divm_max(pll) || cfg->n > divn_max(pll) ||
	    (1 << p_div) > divp_max(pll) ||
	    cfg->output_rate > pll->params->vco_max) {
		return -EINVAL;
	}

	cfg->output_rate >>= p_div;

	if (pll->params->pdiv_tohw) {
		ret = _p_div_to_hw(hw, 1 << p_div);
		if (ret < 0)
			return ret;
		else
			cfg->p = ret;
	} else
		cfg->p = p_div;

	return 0;
}

static void _update_pll_mnp(struct tegra_clk_pll *pll,
			    struct tegra_clk_pll_freq_table *cfg)
{
	u32 val;
	struct tegra_clk_pll_params *params = pll->params;
	struct div_nmp *div_nmp = params->div_nmp;

	if ((params->flags & TEGRA_PLLM) &&
	    (pll_override_readl(PMC_PLLP_WB0_OVERRIDE, pll) &
	     PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE)) {
		val = pll_override_readl(params->pmc_divp_reg, pll);
		val &= ~(divp_mask(pll) << div_nmp->override_divp_shift);
		val |= cfg->p << div_nmp->override_divp_shift;
		pll_override_writel(val, params->pmc_divp_reg, pll);

		val = pll_override_readl(params->pmc_divnm_reg, pll);
		val &= ~((divm_mask(pll) << div_nmp->override_divm_shift) |
			 (divn_mask(pll) << div_nmp->override_divn_shift));
		val |= (cfg->m << div_nmp->override_divm_shift) |
		       (cfg->n << div_nmp->override_divn_shift);
		pll_override_writel(val, params->pmc_divnm_reg, pll);
	} else {
		val = pll_readl_base(pll);

		val &= ~(divm_mask_shifted(pll) | divn_mask_shifted(pll) |
			 divp_mask_shifted(pll));

		val |= (cfg->m << divm_shift(pll)) |
		       (cfg->n << divn_shift(pll)) |
		       (cfg->p << divp_shift(pll));

		pll_writel_base(val, pll);
	}
}

static void _get_pll_mnp(struct tegra_clk_pll *pll,
			 struct tegra_clk_pll_freq_table *cfg)
{
	u32 val;
	struct tegra_clk_pll_params *params = pll->params;
	struct div_nmp *div_nmp = params->div_nmp;

	if ((params->flags & TEGRA_PLLM) &&
	    (pll_override_readl(PMC_PLLP_WB0_OVERRIDE, pll) &
	     PMC_PLLP_WB0_OVERRIDE_PLLM_OVERRIDE)) {
		val = pll_override_readl(params->pmc_divp_reg, pll);
		cfg->p = (val >> div_nmp->override_divp_shift) & divp_mask(pll);

		val = pll_override_readl(params->pmc_divnm_reg, pll);
		cfg->m = (val >> div_nmp->override_divm_shift) & divm_mask(pll);
		cfg->n = (val >> div_nmp->override_divn_shift) & divn_mask(pll);
	} else {
		val = pll_readl_base(pll);

		cfg->m = (val >> div_nmp->divm_shift) & divm_mask(pll);
		cfg->n = (val >> div_nmp->divn_shift) & divn_mask(pll);
		cfg->p = (val >> div_nmp->divp_shift) & divp_mask(pll);
	}
}

static void _update_pll_cpcon(struct tegra_clk_pll *pll,
			      struct tegra_clk_pll_freq_table *cfg,
			      unsigned long rate)
{
	u32 val;

	val = pll_readl_misc(pll);

	val &= ~(PLL_MISC_CPCON_MASK << PLL_MISC_CPCON_SHIFT);
	val |= cfg->cpcon << PLL_MISC_CPCON_SHIFT;

	if (pll->params->flags & TEGRA_PLL_SET_LFCON) {
		val &= ~(PLL_MISC_LFCON_MASK << PLL_MISC_LFCON_SHIFT);
		if (cfg->n >= PLLDU_LFCON_SET_DIVN)
			val |= 1 << PLL_MISC_LFCON_SHIFT;
	} else if (pll->params->flags & TEGRA_PLL_SET_DCCON) {
		val &= ~(1 << PLL_MISC_DCCON_SHIFT);
		if (rate >= (pll->params->vco_max >> 1))
			val |= 1 << PLL_MISC_DCCON_SHIFT;
	}

	pll_writel_misc(val, pll);
}

static int _program_pll(struct clk_hw *hw, struct tegra_clk_pll_freq_table *cfg,
			unsigned long rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	int state, ret = 0;

	state = clk_pll_is_enabled(hw);

	if (state)
		_clk_pll_disable(hw);

	_update_pll_mnp(pll, cfg);

	if (pll->params->flags & TEGRA_PLL_HAS_CPCON)
		_update_pll_cpcon(pll, cfg, rate);

	if (state) {
		_clk_pll_enable(hw);
		ret = clk_pll_wait_for_lock(pll);
	}

	return ret;
}

static int clk_pll_set_rate(struct clk_hw *hw, unsigned long rate,
			    unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table cfg, old_cfg;
	unsigned long flags = 0;
	int ret = 0;

	if (pll->params->flags & TEGRA_PLL_FIXED) {
		if (rate != pll->params->fixed_rate) {
			pr_err("%s: Can not change %s fixed rate %lu to %lu\n",
			       __func__, clk_hw_get_name(hw),
			       pll->params->fixed_rate, rate);
			return -EINVAL;
		}
		return 0;
	}

	if (_get_table_rate(hw, &cfg, rate, parent_rate) &&
	    _calc_rate(hw, &cfg, rate, parent_rate)) {
		pr_err("%s: Failed to set %s rate %lu\n", __func__,
		       clk_hw_get_name(hw), rate);
		WARN_ON(1);
		return -EINVAL;
	}

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_get_pll_mnp(pll, &old_cfg);

	if (old_cfg.m != cfg.m || old_cfg.n != cfg.n || old_cfg.p != cfg.p)
		ret = _program_pll(hw, &cfg, rate);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static long clk_pll_round_rate(struct clk_hw *hw, unsigned long rate,
			       unsigned long *prate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table cfg;

	if (pll->params->flags & TEGRA_PLL_FIXED)
		return pll->params->fixed_rate;

	/* PLLM is used for memory; we do not change rate */
	if (pll->params->flags & TEGRA_PLLM)
		return clk_hw_get_rate(hw);

	if (_get_table_rate(hw, &cfg, rate, *prate) &&
	    _calc_rate(hw, &cfg, rate, *prate))
		return -EINVAL;

	return cfg.output_rate;
}

static unsigned long clk_pll_recalc_rate(struct clk_hw *hw,
					 unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table cfg;
	u32 val;
	u64 rate = parent_rate;
	int pdiv;

	val = pll_readl_base(pll);

	if ((pll->params->flags & TEGRA_PLL_BYPASS) && (val & PLL_BASE_BYPASS))
		return parent_rate;

	if ((pll->params->flags & TEGRA_PLL_FIXED) &&
	    !(val & PLL_BASE_OVERRIDE)) {
		struct tegra_clk_pll_freq_table sel;

		if (_get_table_rate(hw, &sel, pll->params->fixed_rate,
				    parent_rate)) {
			pr_err("Clock %s has unknown fixed frequency\n",
			       clk_hw_get_name(hw));
			BUG();
		}
		return pll->params->fixed_rate;
	}

	_get_pll_mnp(pll, &cfg);

	pdiv = _hw_to_p_div(hw, cfg.p);
	if (pdiv < 0) {
		WARN_ON(1);
		pdiv = 1;
	}

	cfg.m *= pdiv;

	rate *= cfg.n;
	do_div(rate, cfg.m);

	return rate;
}

static int clk_plle_training(struct tegra_clk_pll *pll)
{
	u32 val;
	unsigned long timeout;

	if (!pll->pmc)
		return -ENOSYS;

	/*
	 * PLLE is already disabled, and setup cleared;
	 * create falling edge on PLLE IDDQ input.
	 */
	val = readl(pll->pmc + PMC_SATA_PWRGT);
	val |= PMC_SATA_PWRGT_PLLE_IDDQ_VALUE;
	writel(val, pll->pmc + PMC_SATA_PWRGT);

	val = readl(pll->pmc + PMC_SATA_PWRGT);
	val |= PMC_SATA_PWRGT_PLLE_IDDQ_SWCTL;
	writel(val, pll->pmc + PMC_SATA_PWRGT);

	val = readl(pll->pmc + PMC_SATA_PWRGT);
	val &= ~PMC_SATA_PWRGT_PLLE_IDDQ_VALUE;
	writel(val, pll->pmc + PMC_SATA_PWRGT);

	val = pll_readl_misc(pll);

	timeout = jiffies + msecs_to_jiffies(100);
	while (1) {
		val = pll_readl_misc(pll);
		if (val & PLLE_MISC_READY)
			break;
		if (time_after(jiffies, timeout)) {
			pr_err("%s: timeout waiting for PLLE\n", __func__);
			return -EBUSY;
		}
		udelay(300);
	}

	return 0;
}

static int clk_plle_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long input_rate = clk_get_rate(clk_get_parent(hw->clk));
	struct tegra_clk_pll_freq_table sel;
	u32 val;
	int err;

	if (_get_table_rate(hw, &sel, pll->params->fixed_rate, input_rate))
		return -EINVAL;

	clk_pll_disable(hw);

	val = pll_readl_misc(pll);
	val &= ~(PLLE_MISC_LOCK_ENABLE | PLLE_MISC_SETUP_MASK);
	pll_writel_misc(val, pll);

	val = pll_readl_misc(pll);
	if (!(val & PLLE_MISC_READY)) {
		err = clk_plle_training(pll);
		if (err)
			return err;
	}

	if (pll->params->flags & TEGRA_PLLE_CONFIGURE) {
		/* configure dividers */
		val = pll_readl_base(pll);
		val &= ~(divp_mask_shifted(pll) | divn_mask_shifted(pll) |
			 divm_mask_shifted(pll));
		val &= ~(PLLE_BASE_DIVCML_MASK << PLLE_BASE_DIVCML_SHIFT);
		val |= sel.m << divm_shift(pll);
		val |= sel.n << divn_shift(pll);
		val |= sel.p << divp_shift(pll);
		val |= sel.cpcon << PLLE_BASE_DIVCML_SHIFT;
		pll_writel_base(val, pll);
	}

	val = pll_readl_misc(pll);
	val |= PLLE_MISC_SETUP_VALUE;
	val |= PLLE_MISC_LOCK_ENABLE;
	pll_writel_misc(val, pll);

	val = readl(pll->clk_base + PLLE_SS_CTRL);
	val &= ~PLLE_SS_COEFFICIENTS_MASK;
	val |= PLLE_SS_DISABLE;
	writel(val, pll->clk_base + PLLE_SS_CTRL);

	val = pll_readl_base(pll);
	val |= (PLL_BASE_BYPASS | PLL_BASE_ENABLE);
	pll_writel_base(val, pll);

	clk_pll_wait_for_lock(pll);

	return 0;
}

static unsigned long clk_plle_recalc_rate(struct clk_hw *hw,
					  unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val = pll_readl_base(pll);
	u32 divn = 0, divm = 0, divp = 0;
	u64 rate = parent_rate;

	divp = (val >> pll->params->div_nmp->divp_shift) & (divp_mask(pll));
	divn = (val >> pll->params->div_nmp->divn_shift) & (divn_mask(pll));
	divm = (val >> pll->params->div_nmp->divm_shift) & (divm_mask(pll));

	divm *= divp;

	rate *= divn;
	do_div(rate, divm);
	return rate;
}

const struct clk_ops tegra_clk_pll_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pll_enable,
	.disable = clk_pll_disable,
	.recalc_rate = clk_pll_recalc_rate,
	.round_rate = clk_pll_round_rate,
	.set_rate = clk_pll_set_rate,
};

const struct clk_ops tegra_clk_plle_ops = {
	.recalc_rate = clk_plle_recalc_rate,
	.is_enabled = clk_pll_is_enabled,
	.disable = clk_pll_disable,
	.enable = clk_plle_enable,
};
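
/*
 * The following helpers are only built for Tegra114/124/132. They back the
 * PLLXC, PLLM, PLLC/PLLCX, PLLRE and PLLE (Tegra114) variants, which add
 * IDDQ control and dynamic-ramp handling on top of the generic code above.
 */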
#if defined(CONFIG_ARCH_TEGRA_114_SOC) || \
	defined(CONFIG_ARCH_TEGRA_124_SOC) || \
	defined(CONFIG_ARCH_TEGRA_132_SOC)
static int _pll_fixed_mdiv(struct tegra_clk_pll_params *pll_params,
			   unsigned long parent_rate)
{
	if (parent_rate > pll_params->cf_max)
		return 2;
	else
		return 1;
}

static unsigned long _clip_vco_min(unsigned long vco_min,
				   unsigned long parent_rate)
{
	return DIV_ROUND_UP(vco_min, parent_rate) * parent_rate;
}

static int _setup_dynamic_ramp(struct tegra_clk_pll_params *pll_params,
			       void __iomem *clk_base,
			       unsigned long parent_rate)
{
	u32 val;
	u32 step_a, step_b;

	switch (parent_rate) {
	case 12000000:
	case 13000000:
	case 26000000:
		step_a = 0x2B;
		step_b = 0x0B;
		break;
	case 16800000:
		step_a = 0x1A;
		step_b = 0x09;
		break;
	case 19200000:
		step_a = 0x12;
		step_b = 0x08;
		break;
	default:
		pr_err("%s: Unexpected reference rate %lu\n",
		       __func__, parent_rate);
		WARN_ON(1);
		return -EINVAL;
	}

	val = step_a << pll_params->stepa_shift;
	val |= step_b << pll_params->stepb_shift;
	writel_relaxed(val, clk_base + pll_params->dyn_ramp_reg);

	return 0;
}

static int clk_pll_iddq_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	u32 val;
	int ret;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	val = pll_readl(pll->params->iddq_reg, pll);
	val &= ~BIT(pll->params->iddq_bit_idx);
	pll_writel(val, pll->params->iddq_reg, pll);
	udelay(2);

	_clk_pll_enable(hw);

	ret = clk_pll_wait_for_lock(pll);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return 0;
}

static void clk_pll_iddq_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	u32 val;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pll_disable(hw);

	val = pll_readl(pll->params->iddq_reg, pll);
	val |= BIT(pll->params->iddq_bit_idx);
	pll_writel(val, pll->params->iddq_reg, pll);
	udelay(2);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);
}

static int _calc_dynamic_ramp_rate(struct clk_hw *hw,
				   struct tegra_clk_pll_freq_table *cfg,
				   unsigned long rate, unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned int p;
	int p_div;

	if (!rate)
		return -EINVAL;

	p = DIV_ROUND_UP(pll->params->vco_min, rate);
	cfg->m = _pll_fixed_mdiv(pll->params, parent_rate);
	cfg->output_rate = rate * p;
	cfg->n = cfg->output_rate * cfg->m / parent_rate;

	p_div = _p_div_to_hw(hw, p);
	if (p_div < 0)
		return p_div;
	else
		cfg->p = p_div;

	if (cfg->n > divn_max(pll) || cfg->output_rate > pll->params->vco_max)
		return -EINVAL;

	return 0;
}

static int _pll_ramp_calc_pll(struct clk_hw *hw,
			      struct tegra_clk_pll_freq_table *cfg,
			      unsigned long rate, unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	int err = 0, p_div;

	err = _get_table_rate(hw, cfg, rate, parent_rate);
	if (err < 0)
		err = _calc_dynamic_ramp_rate(hw, cfg, rate, parent_rate);
	else {
		if (cfg->m != _pll_fixed_mdiv(pll->params, parent_rate)) {
			WARN_ON(1);
			err = -EINVAL;
			goto out;
		}
		p_div = _p_div_to_hw(hw, cfg->p);
		if (p_div < 0)
			return p_div;
		else
			cfg->p = p_div;
	}

	if (cfg->p > pll->params->max_p)
		err = -EINVAL;

out:
	return err;
}

static int clk_pllxc_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table cfg, old_cfg;
	unsigned long flags = 0;
	int ret;

	ret = _pll_ramp_calc_pll(hw, &cfg, rate, parent_rate);
	if (ret < 0)
		return ret;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_get_pll_mnp(pll, &old_cfg);

	if (old_cfg.m != cfg.m || old_cfg.n != cfg.n || old_cfg.p != cfg.p)
		ret = _program_pll(hw, &cfg, rate);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static long clk_pll_ramp_round_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long *prate)
{
	struct tegra_clk_pll_freq_table cfg;
	int ret, p_div;
	u64 output_rate = *prate;

	ret = _pll_ramp_calc_pll(hw, &cfg, rate, *prate);
	if (ret < 0)
		return ret;

	p_div = _hw_to_p_div(hw, cfg.p);
	if (p_div < 0)
		return p_div;

	output_rate *= cfg.n;
	do_div(output_rate, cfg.m * p_div);

	return output_rate;
}

static int clk_pllm_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	struct tegra_clk_pll_freq_table cfg;
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	int state, ret = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	state = clk_pll_is_enabled(hw);
	if (state) {
		if (rate != clk_get_rate(hw->clk)) {
			pr_err("%s: Cannot change active PLLM\n", __func__);
			ret = -EINVAL;
			goto out;
		}
		goto out;
	}

	ret = _pll_ramp_calc_pll(hw, &cfg, rate, parent_rate);
	if (ret < 0)
		goto out;

	_update_pll_mnp(pll, &cfg);

out:
	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static void _pllcx_strobe(struct tegra_clk_pll *pll)
{
	u32 val;

	val = pll_readl_misc(pll);
	val |= PLLCX_MISC_STROBE;
	pll_writel_misc(val, pll);
	udelay(2);

	val &= ~PLLCX_MISC_STROBE;
	pll_writel_misc(val, pll);
}

static int clk_pllc_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val;
	int ret;
	unsigned long flags = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pll_enable(hw);
	udelay(2);

	val = pll_readl_misc(pll);
	val &= ~PLLCX_MISC_RESET;
	pll_writel_misc(val, pll);
	udelay(2);

	_pllcx_strobe(pll);

	ret = clk_pll_wait_for_lock(pll);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static void _clk_pllc_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u32 val;

	_clk_pll_disable(hw);

	val = pll_readl_misc(pll);
	val |= PLLCX_MISC_RESET;
	pll_writel_misc(val, pll);
	udelay(2);
}

static void clk_pllc_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pllc_disable(hw);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);
}

static int _pllcx_update_dynamic_coef(struct tegra_clk_pll *pll,
				      unsigned long input_rate, u32 n)
{
	u32 val, n_threshold;

	switch (input_rate) {
	case 12000000:
		n_threshold = 70;
		break;
	case 13000000:
	case 26000000:
		n_threshold = 71;
		break;
	case 16800000:
		n_threshold = 55;
		break;
	case 19200000:
		n_threshold = 48;
		break;
	default:
		pr_err("%s: Unexpected reference rate %lu\n",
		       __func__, input_rate);
		return -EINVAL;
	}

	val = pll_readl_misc(pll);
	val &= ~(PLLCX_MISC_SDM_DIV_MASK | PLLCX_MISC_FILT_DIV_MASK);
	val |= n <= n_threshold ?
		PLLCX_MISC_DIV_LOW_RANGE : PLLCX_MISC_DIV_HIGH_RANGE;
	pll_writel_misc(val, pll);

	return 0;
}

static int clk_pllc_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	struct tegra_clk_pll_freq_table cfg, old_cfg;
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	int state, ret = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	ret = _pll_ramp_calc_pll(hw, &cfg, rate, parent_rate);
	if (ret < 0)
		goto out;

	_get_pll_mnp(pll, &old_cfg);

	if (cfg.m != old_cfg.m) {
		WARN_ON(1);
		goto out;
	}

	if (old_cfg.n == cfg.n && old_cfg.p == cfg.p)
		goto out;

	state = clk_pll_is_enabled(hw);
	if (state)
		_clk_pllc_disable(hw);

	ret = _pllcx_update_dynamic_coef(pll, parent_rate, cfg.n);
	if (ret < 0)
		goto out;

	_update_pll_mnp(pll, &cfg);

	if (state)
		ret = clk_pllc_enable(hw);

out:
	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static long _pllre_calc_rate(struct tegra_clk_pll *pll,
			     struct tegra_clk_pll_freq_table *cfg,
			     unsigned long rate, unsigned long parent_rate)
{
	u16 m, n;
	u64 output_rate = parent_rate;

	m = _pll_fixed_mdiv(pll->params, parent_rate);
	n = rate * m / parent_rate;

	output_rate *= n;
	do_div(output_rate, m);

	if (cfg) {
		cfg->m = m;
		cfg->n = n;
	}

	return output_rate;
}

static int clk_pllre_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct tegra_clk_pll_freq_table cfg, old_cfg;
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	int state, ret = 0;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_pllre_calc_rate(pll, &cfg, rate, parent_rate);
	_get_pll_mnp(pll, &old_cfg);
	cfg.p = old_cfg.p;

	if (cfg.m != old_cfg.m || cfg.n != old_cfg.n) {
		state = clk_pll_is_enabled(hw);
		if (state)
			_clk_pll_disable(hw);

		_update_pll_mnp(pll, &cfg);

		if (state) {
			_clk_pll_enable(hw);
			ret = clk_pll_wait_for_lock(pll);
		}
	}

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static unsigned long clk_pllre_recalc_rate(struct clk_hw *hw,
					   unsigned long parent_rate)
{
	struct tegra_clk_pll_freq_table cfg;
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	u64 rate = parent_rate;

	_get_pll_mnp(pll, &cfg);

	rate *= cfg.n;
	do_div(rate, cfg.m);

	return rate;
}

static long clk_pllre_round_rate(struct clk_hw *hw, unsigned long rate,
				 unsigned long *prate)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);

	return _pllre_calc_rate(pll, NULL, rate, *prate);
}

static int clk_plle_tegra114_enable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	struct tegra_clk_pll_freq_table sel;
	u32 val;
	int ret;
	unsigned long flags = 0;
	unsigned long input_rate = clk_get_rate(clk_get_parent(hw->clk));

	if (_get_table_rate(hw, &sel, pll->params->fixed_rate, input_rate))
		return -EINVAL;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	val = pll_readl_base(pll);
	val &= ~BIT(29); /* Disable lock override */
	pll_writel_base(val, pll);

	val = pll_readl(pll->params->aux_reg, pll);
	val |= PLLE_AUX_ENABLE_SWCTL;
	val &= ~PLLE_AUX_SEQ_ENABLE;
	pll_writel(val, pll->params->aux_reg, pll);
	udelay(1);

	val = pll_readl_misc(pll);
	val |= PLLE_MISC_LOCK_ENABLE;
	val |= PLLE_MISC_IDDQ_SW_CTRL;
	val &= ~PLLE_MISC_IDDQ_SW_VALUE;
	val |= PLLE_MISC_PLLE_PTS;
	val |= PLLE_MISC_VREG_BG_CTRL_MASK | PLLE_MISC_VREG_CTRL_MASK;
	pll_writel_misc(val, pll);
	udelay(5);

	val = pll_readl(PLLE_SS_CTRL, pll);
	val |= PLLE_SS_DISABLE;
	pll_writel(val, PLLE_SS_CTRL, pll);

	val = pll_readl_base(pll);
	val &= ~(divp_mask_shifted(pll) | divn_mask_shifted(pll) |
		 divm_mask_shifted(pll));
	val &= ~(PLLE_BASE_DIVCML_MASK << PLLE_BASE_DIVCML_SHIFT);
	val |= sel.m << divm_shift(pll);
	val |= sel.n << divn_shift(pll);
	val |= sel.cpcon << PLLE_BASE_DIVCML_SHIFT;
	pll_writel_base(val, pll);
	udelay(1);

	_clk_pll_enable(hw);
	ret = clk_pll_wait_for_lock(pll);

	if (ret < 0)
		goto out;

	val = pll_readl(PLLE_SS_CTRL, pll);
	val &= ~(PLLE_SS_CNTL_CENTER | PLLE_SS_CNTL_INVERT);
	val &= ~PLLE_SS_COEFFICIENTS_MASK;
	val |= PLLE_SS_COEFFICIENTS_VAL;
	pll_writel(val, PLLE_SS_CTRL, pll);
	val &= ~(PLLE_SS_CNTL_SSC_BYP | PLLE_SS_CNTL_BYPASS_SS);
	pll_writel(val, PLLE_SS_CTRL, pll);
	udelay(1);
	val &= ~PLLE_SS_CNTL_INTERP_RESET;
	pll_writel(val, PLLE_SS_CTRL, pll);
	udelay(1);

	/* Enable hw control of xusb brick pll */
	val = pll_readl_misc(pll);
	val &= ~PLLE_MISC_IDDQ_SW_CTRL;
	pll_writel_misc(val, pll);

	val = pll_readl(pll->params->aux_reg, pll);
	val |= (PLLE_AUX_USE_LOCKDET | PLLE_AUX_SEQ_START_STATE);
	val &= ~(PLLE_AUX_ENABLE_SWCTL | PLLE_AUX_SS_SWCTL);
	pll_writel(val, pll->params->aux_reg, pll);
	udelay(1);
	val |= PLLE_AUX_SEQ_ENABLE;
	pll_writel(val, pll->params->aux_reg, pll);

	val = pll_readl(XUSBIO_PLL_CFG0, pll);
	val |= (XUSBIO_PLL_CFG0_PADPLL_USE_LOCKDET |
		XUSBIO_PLL_CFG0_SEQ_START_STATE);
	val &= ~(XUSBIO_PLL_CFG0_CLK_ENABLE_SWCTL |
		 XUSBIO_PLL_CFG0_PADPLL_RESET_SWCTL);
	pll_writel(val, XUSBIO_PLL_CFG0, pll);
	udelay(1);
	val |= XUSBIO_PLL_CFG0_SEQ_ENABLE;
	pll_writel(val, XUSBIO_PLL_CFG0, pll);

	/* Enable hw control of SATA pll */
	val = pll_readl(SATA_PLL_CFG0, pll);
	val &= ~SATA_PLL_CFG0_PADPLL_RESET_SWCTL;
	val |= SATA_PLL_CFG0_PADPLL_USE_LOCKDET;
	val |= SATA_PLL_CFG0_SEQ_START_STATE;
	pll_writel(val, SATA_PLL_CFG0, pll);
	udelay(1);

	val = pll_readl(SATA_PLL_CFG0, pll);
	val |= SATA_PLL_CFG0_SEQ_ENABLE;
	pll_writel(val, SATA_PLL_CFG0, pll);

out:
	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);

	return ret;
}

static void clk_plle_tegra114_disable(struct clk_hw *hw)
{
	struct tegra_clk_pll *pll = to_clk_pll(hw);
	unsigned long flags = 0;
	u32 val;

	if (pll->lock)
		spin_lock_irqsave(pll->lock, flags);

	_clk_pll_disable(hw);

	val = pll_readl_misc(pll);
	val |= PLLE_MISC_IDDQ_SW_CTRL | PLLE_MISC_IDDQ_SW_VALUE;
	pll_writel_misc(val, pll);
	udelay(1);

	if (pll->lock)
		spin_unlock_irqrestore(pll->lock, flags);
}
#endif
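
/*
 * Common allocation and registration helpers: _tegra_init_pll() fills in the
 * tegra_clk_pll structure (falling back to default_nmp when no divider layout
 * is given) and _tegra_clk_register_pll() hands the result to the common
 * clock framework via clk_register().
 */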
static struct tegra_clk_pll *_tegra_init_pll(void __iomem *clk_base,
		void __iomem *pmc, struct tegra_clk_pll_params *pll_params,
		spinlock_t *lock)
{
	struct tegra_clk_pll *pll;

	pll = kzalloc(sizeof(*pll), GFP_KERNEL);
	if (!pll)
		return ERR_PTR(-ENOMEM);

	pll->clk_base = clk_base;
	pll->pmc = pmc;

	pll->params = pll_params;
	pll->lock = lock;

	if (!pll_params->div_nmp)
		pll_params->div_nmp = &default_nmp;

	return pll;
}

static struct clk *_tegra_clk_register_pll(struct tegra_clk_pll *pll,
		const char *name, const char *parent_name, unsigned long flags,
		const struct clk_ops *ops)
{
	struct clk_init_data init;

	init.name = name;
	init.ops = ops;
	init.flags = flags;
	init.parent_names = (parent_name ? &parent_name : NULL);
	init.num_parents = (parent_name ? 1 : 0);

	/* Data in .init is copied by clk_register(), so stack variable OK */
	pll->hw.init = &init;

	return clk_register(NULL, &pll->hw);
}

struct clk *tegra_clk_register_pll(const char *name, const char *parent_name,
		void __iomem *clk_base, void __iomem *pmc,
		unsigned long flags, struct tegra_clk_pll_params *pll_params,
		spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk;

	pll_params->flags |= TEGRA_PLL_BYPASS;
	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pll_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}

static struct div_nmp pll_e_nmp = {
	.divn_shift = PLLE_BASE_DIVN_SHIFT,
	.divn_width = PLLE_BASE_DIVN_WIDTH,
	.divm_shift = PLLE_BASE_DIVM_SHIFT,
	.divm_width = PLLE_BASE_DIVM_WIDTH,
	.divp_shift = PLLE_BASE_DIVP_SHIFT,
	.divp_width = PLLE_BASE_DIVP_WIDTH,
};

struct clk *tegra_clk_register_plle(const char *name, const char *parent_name,
		void __iomem *clk_base, void __iomem *pmc,
		unsigned long flags, struct tegra_clk_pll_params *pll_params,
		spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk;

	pll_params->flags |= TEGRA_PLL_LOCK_MISC | TEGRA_PLL_BYPASS;
	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;

	if (!pll_params->div_nmp)
		pll_params->div_nmp = &pll_e_nmp;

	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_plle_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}

#if defined(CONFIG_ARCH_TEGRA_114_SOC) || \
	defined(CONFIG_ARCH_TEGRA_124_SOC) || \
	defined(CONFIG_ARCH_TEGRA_132_SOC)
static const struct clk_ops tegra_clk_pllxc_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pll_iddq_enable,
	.disable = clk_pll_iddq_disable,
	.recalc_rate = clk_pll_recalc_rate,
	.round_rate = clk_pll_ramp_round_rate,
	.set_rate = clk_pllxc_set_rate,
};

static const struct clk_ops tegra_clk_pllm_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pll_iddq_enable,
	.disable = clk_pll_iddq_disable,
	.recalc_rate = clk_pll_recalc_rate,
	.round_rate = clk_pll_ramp_round_rate,
	.set_rate = clk_pllm_set_rate,
};

static const struct clk_ops tegra_clk_pllc_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pllc_enable,
	.disable = clk_pllc_disable,
	.recalc_rate = clk_pll_recalc_rate,
	.round_rate = clk_pll_ramp_round_rate,
	.set_rate = clk_pllc_set_rate,
};

static const struct clk_ops tegra_clk_pllre_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pll_iddq_enable,
	.disable = clk_pll_iddq_disable,
	.recalc_rate = clk_pllre_recalc_rate,
	.round_rate = clk_pllre_round_rate,
	.set_rate = clk_pllre_set_rate,
};

static const struct clk_ops tegra_clk_plle_tegra114_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_plle_tegra114_enable,
	.disable = clk_plle_tegra114_disable,
	.recalc_rate = clk_pll_recalc_rate,
};

struct clk *tegra_clk_register_pllxc(const char *name, const char *parent_name,
			  void __iomem *clk_base, void __iomem *pmc,
			  unsigned long flags,
			  struct tegra_clk_pll_params *pll_params,
			  spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk, *parent;
	unsigned long parent_rate;
	int err;
	u32 val, val_iddq;

	parent = __clk_lookup(parent_name);
	if (!parent) {
		WARN(1, "parent clk %s of %s must be registered first\n",
		     parent_name, name);
		return ERR_PTR(-EINVAL);
	}

	if (!pll_params->pdiv_tohw)
		return ERR_PTR(-EINVAL);

	parent_rate = clk_get_rate(parent);

	pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);

	err = _setup_dynamic_ramp(pll_params, clk_base, parent_rate);
	if (err)
		return ERR_PTR(err);

	val = readl_relaxed(clk_base + pll_params->base_reg);
	val_iddq = readl_relaxed(clk_base + pll_params->iddq_reg);

	if (val & PLL_BASE_ENABLE)
		WARN_ON(val_iddq & BIT(pll_params->iddq_bit_idx));
	else {
		val_iddq |= BIT(pll_params->iddq_bit_idx);
		writel_relaxed(val_iddq, clk_base + pll_params->iddq_reg);
	}

	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pllxc_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}

struct clk *tegra_clk_register_pllre(const char *name, const char *parent_name,
			  void __iomem *clk_base, void __iomem *pmc,
			  unsigned long flags,
			  struct tegra_clk_pll_params *pll_params,
			  spinlock_t *lock, unsigned long parent_rate)
{
	u32 val;
	struct tegra_clk_pll *pll;
	struct clk *clk;

	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE | TEGRA_PLL_LOCK_MISC;

	pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);

	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	/* program minimum rate by default */

	val = pll_readl_base(pll);
	if (val & PLL_BASE_ENABLE)
		WARN_ON(val & pll_params->iddq_bit_idx);
	else {
		int m;

		m = _pll_fixed_mdiv(pll_params, parent_rate);
		val = m << divm_shift(pll);
		val |= (pll_params->vco_min / parent_rate) << divn_shift(pll);
		pll_writel_base(val, pll);
	}

	/* disable lock override */

	val = pll_readl_misc(pll);
	val &= ~BIT(29);
	pll_writel_misc(val, pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pllre_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}

struct clk *tegra_clk_register_pllm(const char *name, const char *parent_name,
			  void __iomem *clk_base, void __iomem *pmc,
			  unsigned long flags,
			  struct tegra_clk_pll_params *pll_params,
			  spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk, *parent;
	unsigned long parent_rate;

	if (!pll_params->pdiv_tohw)
		return ERR_PTR(-EINVAL);

	parent = __clk_lookup(parent_name);
	if (!parent) {
		WARN(1, "parent clk %s of %s must be registered first\n",
		     parent_name, name);
		return ERR_PTR(-EINVAL);
	}

	parent_rate = clk_get_rate(parent);

	pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);

	pll_params->flags |= TEGRA_PLL_BYPASS;
	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
	pll_params->flags |= TEGRA_PLLM;
	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pllm_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}

struct clk *tegra_clk_register_pllc(const char *name, const char *parent_name,
			  void __iomem *clk_base, void __iomem *pmc,
			  unsigned long flags,
			  struct tegra_clk_pll_params *pll_params,
			  spinlock_t *lock)
{
	struct clk *parent, *clk;
	struct pdiv_map *p_tohw = pll_params->pdiv_tohw;
	struct tegra_clk_pll *pll;
	struct tegra_clk_pll_freq_table cfg;
	unsigned long parent_rate;

	if (!p_tohw)
		return ERR_PTR(-EINVAL);

	parent = __clk_lookup(parent_name);
	if (!parent) {
		WARN(1, "parent clk %s of %s must be registered first\n",
		     parent_name, name);
		return ERR_PTR(-EINVAL);
	}

	parent_rate = clk_get_rate(parent);

	pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);

	pll_params->flags |= TEGRA_PLL_BYPASS;
	pll = _tegra_init_pll(clk_base, pmc, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	/*
	 * Most of PLLC register fields are shadowed, and can not be read
	 * directly from PLL h/w. Hence, actual PLLC boot state is unknown.
	 * Initialize PLL to default state: disabled, reset; shadow registers
	 * loaded with default parameters; dividers are preset for half of
	 * minimum VCO rate (the latter assured that shadowed divider settings
	 * are within supported range).
	 */
	cfg.m = _pll_fixed_mdiv(pll_params, parent_rate);
	cfg.n = cfg.m * pll_params->vco_min / parent_rate;

	while (p_tohw->pdiv) {
		if (p_tohw->pdiv == 2) {
			cfg.p = p_tohw->hw_val;
			break;
		}
		p_tohw++;
	}

	if (!p_tohw->pdiv) {
		WARN_ON(1);
		return ERR_PTR(-EINVAL);
	}

	pll_writel_base(0, pll);
	_update_pll_mnp(pll, &cfg);

	pll_writel_misc(PLLCX_MISC_DEFAULT, pll);
	pll_writel(PLLCX_MISC1_DEFAULT, pll_params->ext_misc_reg[0], pll);
	pll_writel(PLLCX_MISC2_DEFAULT, pll_params->ext_misc_reg[1], pll);
	pll_writel(PLLCX_MISC3_DEFAULT, pll_params->ext_misc_reg[2], pll);

	_pllcx_update_dynamic_coef(pll, parent_rate, cfg.n);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pllc_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}

struct clk *tegra_clk_register_plle_tegra114(const char *name,
				const char *parent_name,
				void __iomem *clk_base, unsigned long flags,
				struct tegra_clk_pll_params *pll_params,
				spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk;
	u32 val, val_aux;

	pll_params->flags |= TEGRA_PLL_HAS_LOCK_ENABLE;
	pll = _tegra_init_pll(clk_base, NULL, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	/* ensure parent is set to pll_re_vco */

	val = pll_readl_base(pll);
	val_aux = pll_readl(pll_params->aux_reg, pll);

	if (val & PLL_BASE_ENABLE) {
		if ((val_aux & PLLE_AUX_PLLRE_SEL) ||
		    (val_aux & PLLE_AUX_PLLP_SEL))
			WARN(1, "pll_e enabled with unsupported parent %s\n",
			     (val_aux & PLLE_AUX_PLLP_SEL) ? "pllp_out0" :
			     "pll_re_vco");
	} else {
		val_aux &= ~(PLLE_AUX_PLLRE_SEL | PLLE_AUX_PLLP_SEL);
		pll_writel(val_aux, pll_params->aux_reg, pll);
	}

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_plle_tegra114_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}
#endif
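
/* PLLSS is Tegra124/132 only; it reuses the generic IDDQ enable/disable path. */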
#if defined(CONFIG_ARCH_TEGRA_124_SOC) || defined(CONFIG_ARCH_TEGRA_132_SOC)
static const struct clk_ops tegra_clk_pllss_ops = {
	.is_enabled = clk_pll_is_enabled,
	.enable = clk_pll_iddq_enable,
	.disable = clk_pll_iddq_disable,
	.recalc_rate = clk_pll_recalc_rate,
	.round_rate = clk_pll_ramp_round_rate,
	.set_rate = clk_pllxc_set_rate,
};

struct clk *tegra_clk_register_pllss(const char *name, const char *parent_name,
				void __iomem *clk_base, unsigned long flags,
				struct tegra_clk_pll_params *pll_params,
				spinlock_t *lock)
{
	struct tegra_clk_pll *pll;
	struct clk *clk, *parent;
	struct tegra_clk_pll_freq_table cfg;
	unsigned long parent_rate;
	u32 val;
	int i;

	if (!pll_params->div_nmp)
		return ERR_PTR(-EINVAL);

	parent = __clk_lookup(parent_name);
	if (!parent) {
		WARN(1, "parent clk %s of %s must be registered first\n",
		     parent_name, name);
		return ERR_PTR(-EINVAL);
	}

	pll_params->flags = TEGRA_PLL_HAS_LOCK_ENABLE | TEGRA_PLL_USE_LOCK;
	pll = _tegra_init_pll(clk_base, NULL, pll_params, lock);
	if (IS_ERR(pll))
		return ERR_CAST(pll);

	val = pll_readl_base(pll);
	val &= ~PLLSS_REF_SRC_SEL_MASK;
	pll_writel_base(val, pll);

	parent_rate = clk_get_rate(parent);

	pll_params->vco_min = _clip_vco_min(pll_params->vco_min, parent_rate);

	/* initialize PLL to minimum rate */

	cfg.m = _pll_fixed_mdiv(pll_params, parent_rate);
	cfg.n = cfg.m * pll_params->vco_min / parent_rate;

	for (i = 0; pll_params->pdiv_tohw[i].pdiv; i++)
		;
	if (!i) {
		kfree(pll);
		return ERR_PTR(-EINVAL);
	}

	cfg.p = pll_params->pdiv_tohw[i-1].hw_val;

	_update_pll_mnp(pll, &cfg);

	pll_writel_misc(PLLSS_MISC_DEFAULT, pll);
	pll_writel(PLLSS_CFG_DEFAULT, pll_params->ext_misc_reg[0], pll);
	pll_writel(PLLSS_CTRL1_DEFAULT, pll_params->ext_misc_reg[1], pll);
	pll_writel(PLLSS_CTRL2_DEFAULT, pll_params->ext_misc_reg[2], pll);

	val = pll_readl_base(pll);
	if (val & PLL_BASE_ENABLE) {
		if (val & BIT(pll_params->iddq_bit_idx)) {
			WARN(1, "%s is on but IDDQ set\n", name);
			kfree(pll);
			return ERR_PTR(-EINVAL);
		}
	} else
		val |= BIT(pll_params->iddq_bit_idx);

	val &= ~PLLSS_LOCK_OVERRIDE;
	pll_writel_base(val, pll);

	clk = _tegra_clk_register_pll(pll, name, parent_name, flags,
				      &tegra_clk_pllss_ops);
	if (IS_ERR(clk))
		kfree(pll);

	return clk;
}
#endif