clk-alpha-pll.c

  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (c) 2015, 2018, The Linux Foundation. All rights reserved.
  4. */
  5. #include <linux/kernel.h>
  6. #include <linux/export.h>
  7. #include <linux/clk-provider.h>
  8. #include <linux/regmap.h>
  9. #include <linux/delay.h>
  10. #include "clk-alpha-pll.h"
  11. #include "common.h"
  12. #define PLL_MODE(p) ((p)->offset + 0x0)
  13. # define PLL_OUTCTRL BIT(0)
  14. # define PLL_BYPASSNL BIT(1)
  15. # define PLL_RESET_N BIT(2)
  16. # define PLL_OFFLINE_REQ BIT(7)
  17. # define PLL_LOCK_COUNT_SHIFT 8
  18. # define PLL_LOCK_COUNT_MASK 0x3f
  19. # define PLL_BIAS_COUNT_SHIFT 14
  20. # define PLL_BIAS_COUNT_MASK 0x3f
  21. # define PLL_VOTE_FSM_ENA BIT(20)
  22. # define PLL_FSM_ENA BIT(20)
  23. # define PLL_VOTE_FSM_RESET BIT(21)
  24. # define PLL_UPDATE BIT(22)
  25. # define PLL_UPDATE_BYPASS BIT(23)
  26. # define PLL_OFFLINE_ACK BIT(28)
  27. # define ALPHA_PLL_ACK_LATCH BIT(29)
  28. # define PLL_ACTIVE_FLAG BIT(30)
  29. # define PLL_LOCK_DET BIT(31)
  30. #define PLL_L_VAL(p) ((p)->offset + (p)->regs[PLL_OFF_L_VAL])
  31. #define PLL_CAL_L_VAL(p) ((p)->offset + (p)->regs[PLL_OFF_CAL_L_VAL])
  32. #define PLL_ALPHA_VAL(p) ((p)->offset + (p)->regs[PLL_OFF_ALPHA_VAL])
  33. #define PLL_ALPHA_VAL_U(p) ((p)->offset + (p)->regs[PLL_OFF_ALPHA_VAL_U])
  34. #define PLL_USER_CTL(p) ((p)->offset + (p)->regs[PLL_OFF_USER_CTL])
  35. # define PLL_POST_DIV_SHIFT 8
  36. # define PLL_POST_DIV_MASK(p) GENMASK((p)->width, 0)
  37. # define PLL_ALPHA_EN BIT(24)
  38. # define PLL_ALPHA_MODE BIT(25)
  39. # define PLL_VCO_SHIFT 20
  40. # define PLL_VCO_MASK 0x3
  41. #define PLL_USER_CTL_U(p) ((p)->offset + (p)->regs[PLL_OFF_USER_CTL_U])
  42. #define PLL_USER_CTL_U1(p) ((p)->offset + (p)->regs[PLL_OFF_USER_CTL_U1])
  43. #define PLL_CONFIG_CTL(p) ((p)->offset + (p)->regs[PLL_OFF_CONFIG_CTL])
  44. #define PLL_CONFIG_CTL_U(p) ((p)->offset + (p)->regs[PLL_OFF_CONFIG_CTL_U])
  45. #define PLL_CONFIG_CTL_U1(p) ((p)->offset + (p)->regs[PLL_OFF_CONFIG_CTL_U1])
  46. #define PLL_TEST_CTL(p) ((p)->offset + (p)->regs[PLL_OFF_TEST_CTL])
  47. #define PLL_TEST_CTL_U(p) ((p)->offset + (p)->regs[PLL_OFF_TEST_CTL_U])
  48. #define PLL_TEST_CTL_U1(p) ((p)->offset + (p)->regs[PLL_OFF_TEST_CTL_U1])
  49. #define PLL_STATUS(p) ((p)->offset + (p)->regs[PLL_OFF_STATUS])
  50. #define PLL_OPMODE(p) ((p)->offset + (p)->regs[PLL_OFF_OPMODE])
  51. #define PLL_FRAC(p) ((p)->offset + (p)->regs[PLL_OFF_FRAC])
  52. const u8 clk_alpha_pll_regs[][PLL_OFF_MAX_REGS] = {
  53. [CLK_ALPHA_PLL_TYPE_DEFAULT] = {
  54. [PLL_OFF_L_VAL] = 0x04,
  55. [PLL_OFF_ALPHA_VAL] = 0x08,
  56. [PLL_OFF_ALPHA_VAL_U] = 0x0c,
  57. [PLL_OFF_USER_CTL] = 0x10,
  58. [PLL_OFF_USER_CTL_U] = 0x14,
  59. [PLL_OFF_CONFIG_CTL] = 0x18,
  60. [PLL_OFF_TEST_CTL] = 0x1c,
  61. [PLL_OFF_TEST_CTL_U] = 0x20,
  62. [PLL_OFF_STATUS] = 0x24,
  63. },
  64. [CLK_ALPHA_PLL_TYPE_HUAYRA] = {
  65. [PLL_OFF_L_VAL] = 0x04,
  66. [PLL_OFF_ALPHA_VAL] = 0x08,
  67. [PLL_OFF_USER_CTL] = 0x10,
  68. [PLL_OFF_CONFIG_CTL] = 0x14,
  69. [PLL_OFF_CONFIG_CTL_U] = 0x18,
  70. [PLL_OFF_TEST_CTL] = 0x1c,
  71. [PLL_OFF_TEST_CTL_U] = 0x20,
  72. [PLL_OFF_STATUS] = 0x24,
  73. },
  74. [CLK_ALPHA_PLL_TYPE_BRAMMO] = {
  75. [PLL_OFF_L_VAL] = 0x04,
  76. [PLL_OFF_ALPHA_VAL] = 0x08,
  77. [PLL_OFF_ALPHA_VAL_U] = 0x0c,
  78. [PLL_OFF_USER_CTL] = 0x10,
  79. [PLL_OFF_CONFIG_CTL] = 0x18,
  80. [PLL_OFF_TEST_CTL] = 0x1c,
  81. [PLL_OFF_STATUS] = 0x24,
  82. },
  83. [CLK_ALPHA_PLL_TYPE_FABIA] = {
  84. [PLL_OFF_L_VAL] = 0x04,
  85. [PLL_OFF_USER_CTL] = 0x0c,
  86. [PLL_OFF_USER_CTL_U] = 0x10,
  87. [PLL_OFF_CONFIG_CTL] = 0x14,
  88. [PLL_OFF_CONFIG_CTL_U] = 0x18,
  89. [PLL_OFF_TEST_CTL] = 0x1c,
  90. [PLL_OFF_TEST_CTL_U] = 0x20,
  91. [PLL_OFF_STATUS] = 0x24,
  92. [PLL_OFF_OPMODE] = 0x2c,
  93. [PLL_OFF_FRAC] = 0x38,
  94. },
  95. [CLK_ALPHA_PLL_TYPE_TRION] = {
  96. [PLL_OFF_L_VAL] = 0x04,
  97. [PLL_OFF_CAL_L_VAL] = 0x08,
  98. [PLL_OFF_USER_CTL] = 0x0c,
  99. [PLL_OFF_USER_CTL_U] = 0x10,
  100. [PLL_OFF_USER_CTL_U1] = 0x14,
  101. [PLL_OFF_CONFIG_CTL] = 0x18,
  102. [PLL_OFF_CONFIG_CTL_U] = 0x1c,
  103. [PLL_OFF_CONFIG_CTL_U1] = 0x20,
  104. [PLL_OFF_TEST_CTL] = 0x24,
  105. [PLL_OFF_TEST_CTL_U] = 0x28,
  106. [PLL_OFF_TEST_CTL_U1] = 0x2c,
  107. [PLL_OFF_STATUS] = 0x30,
  108. [PLL_OFF_OPMODE] = 0x38,
  109. [PLL_OFF_ALPHA_VAL] = 0x40,
  110. },
  111. };
  112. EXPORT_SYMBOL_GPL(clk_alpha_pll_regs);
  113. /*
  114. * Even though 40 bits are present, use only 32 for ease of calculation.
  115. */
  116. #define ALPHA_REG_BITWIDTH 40
  117. #define ALPHA_REG_16BIT_WIDTH 16
  118. #define ALPHA_BITWIDTH 32U
  119. #define ALPHA_SHIFT(w) min(w, ALPHA_BITWIDTH)
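/*
 * For PLLs with a 40-bit alpha register pair, only the upper 32 bits are
 * actually programmed: __clk_alpha_pll_set_rate() shifts the computed
 * fraction left by (alpha_width - ALPHA_BITWIDTH) before writing it, and
 * clk_alpha_pll_recalc_rate() shifts it back when reading.
 */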
  120. #define PLL_HUAYRA_M_WIDTH 8
  121. #define PLL_HUAYRA_M_SHIFT 8
  122. #define PLL_HUAYRA_M_MASK 0xff
  123. #define PLL_HUAYRA_N_SHIFT 0
  124. #define PLL_HUAYRA_N_MASK 0xff
  125. #define PLL_HUAYRA_ALPHA_WIDTH 16
  126. #define PLL_STANDBY 0x0
  127. #define PLL_RUN 0x1
  128. #define PLL_OUT_MASK 0x7
  129. #define PLL_RATE_MARGIN 500
  130. /* TRION PLL specific settings and offsets */
  131. #define TRION_PLL_CAL_VAL 0x44
  132. #define TRION_PCAL_DONE BIT(26)
  133. /* LUCID PLL specific settings and offsets */
  134. #define LUCID_PCAL_DONE BIT(27)
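/*
 * The per-type register layout table determines the PLL capabilities: if
 * the _U register immediately follows its base register (an offset delta
 * of 4), the PLL is assumed to have a 40-bit alpha value or a second
 * 32-bit CONFIG_CTL word, respectively; otherwise the narrower variant
 * is used.
 */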
  135. #define pll_alpha_width(p) \
  136. ((PLL_ALPHA_VAL_U(p) - PLL_ALPHA_VAL(p) == 4) ? \
  137. ALPHA_REG_BITWIDTH : ALPHA_REG_16BIT_WIDTH)
  138. #define pll_has_64bit_config(p) ((PLL_CONFIG_CTL_U(p) - PLL_CONFIG_CTL(p)) == 4)
  139. #define to_clk_alpha_pll(_hw) container_of(to_clk_regmap(_hw), \
  140. struct clk_alpha_pll, clkr)
  141. #define to_clk_alpha_pll_postdiv(_hw) container_of(to_clk_regmap(_hw), \
  142. struct clk_alpha_pll_postdiv, clkr)
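/*
 * Poll PLL_MODE for up to ~100 us (100 iterations of udelay(1)) until the
 * bits in @mask are set, or cleared when @inverse is true. @action is only
 * used in the timeout warning.
 */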
  143. static int wait_for_pll(struct clk_alpha_pll *pll, u32 mask, bool inverse,
  144. const char *action)
  145. {
  146. u32 val;
  147. int count;
  148. int ret;
  149. const char *name = clk_hw_get_name(&pll->clkr.hw);
  150. ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
  151. if (ret)
  152. return ret;
  153. for (count = 100; count > 0; count--) {
  154. ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
  155. if (ret)
  156. return ret;
  157. if (inverse && !(val & mask))
  158. return 0;
  159. else if ((val & mask) == mask)
  160. return 0;
  161. udelay(1);
  162. }
  163. WARN(1, "%s failed to %s!\n", name, action);
  164. return -ETIMEDOUT;
  165. }
  166. #define wait_for_pll_enable_active(pll) \
  167. wait_for_pll(pll, PLL_ACTIVE_FLAG, 0, "enable")
  168. #define wait_for_pll_enable_lock(pll) \
  169. wait_for_pll(pll, PLL_LOCK_DET, 0, "enable")
  170. #define wait_for_pll_disable(pll) \
  171. wait_for_pll(pll, PLL_ACTIVE_FLAG, 1, "disable")
  172. #define wait_for_pll_offline(pll) \
  173. wait_for_pll(pll, PLL_OFFLINE_ACK, 0, "offline")
  174. #define wait_for_pll_update(pll) \
  175. wait_for_pll(pll, PLL_UPDATE, 1, "update")
  176. #define wait_for_pll_update_ack_set(pll) \
  177. wait_for_pll(pll, ALPHA_PLL_ACK_LATCH, 0, "update_ack_set")
  178. #define wait_for_pll_update_ack_clear(pll) \
  179. wait_for_pll(pll, ALPHA_PLL_ACK_LATCH, 1, "update_ack_clear")
  180. void clk_alpha_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
  181. const struct alpha_pll_config *config)
  182. {
  183. u32 val, mask;
  184. regmap_write(regmap, PLL_L_VAL(pll), config->l);
  185. regmap_write(regmap, PLL_ALPHA_VAL(pll), config->alpha);
  186. regmap_write(regmap, PLL_CONFIG_CTL(pll), config->config_ctl_val);
  187. if (pll_has_64bit_config(pll))
  188. regmap_write(regmap, PLL_CONFIG_CTL_U(pll),
  189. config->config_ctl_hi_val);
  190. if (pll_alpha_width(pll) > 32)
  191. regmap_write(regmap, PLL_ALPHA_VAL_U(pll), config->alpha_hi);
  192. val = config->main_output_mask;
  193. val |= config->aux_output_mask;
  194. val |= config->aux2_output_mask;
  195. val |= config->early_output_mask;
  196. val |= config->pre_div_val;
  197. val |= config->post_div_val;
  198. val |= config->vco_val;
  199. val |= config->alpha_en_mask;
  200. val |= config->alpha_mode_mask;
  201. mask = config->main_output_mask;
  202. mask |= config->aux_output_mask;
  203. mask |= config->aux2_output_mask;
  204. mask |= config->early_output_mask;
  205. mask |= config->pre_div_mask;
  206. mask |= config->post_div_mask;
  207. mask |= config->vco_mask;
  208. regmap_update_bits(regmap, PLL_USER_CTL(pll), mask, val);
  209. if (pll->flags & SUPPORTS_FSM_MODE)
  210. qcom_pll_set_fsm_mode(regmap, PLL_MODE(pll), 6, 0);
  211. }
  212. EXPORT_SYMBOL_GPL(clk_alpha_pll_configure);
  213. static int clk_alpha_pll_hwfsm_enable(struct clk_hw *hw)
  214. {
  215. int ret;
  216. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  217. u32 val;
  218. ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
  219. if (ret)
  220. return ret;
  221. val |= PLL_FSM_ENA;
  222. if (pll->flags & SUPPORTS_OFFLINE_REQ)
  223. val &= ~PLL_OFFLINE_REQ;
  224. ret = regmap_write(pll->clkr.regmap, PLL_MODE(pll), val);
  225. if (ret)
  226. return ret;
  227. /* Make sure enable request goes through before waiting for update */
  228. mb();
  229. return wait_for_pll_enable_active(pll);
  230. }
  231. static void clk_alpha_pll_hwfsm_disable(struct clk_hw *hw)
  232. {
  233. int ret;
  234. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  235. u32 val;
  236. ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
  237. if (ret)
  238. return;
  239. if (pll->flags & SUPPORTS_OFFLINE_REQ) {
  240. ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
  241. PLL_OFFLINE_REQ, PLL_OFFLINE_REQ);
  242. if (ret)
  243. return;
  244. ret = wait_for_pll_offline(pll);
  245. if (ret)
  246. return;
  247. }
  248. /* Disable hwfsm */
  249. ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
  250. PLL_FSM_ENA, 0);
  251. if (ret)
  252. return;
  253. wait_for_pll_disable(pll);
  254. }
  255. static int pll_is_enabled(struct clk_hw *hw, u32 mask)
  256. {
  257. int ret;
  258. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  259. u32 val;
  260. ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
  261. if (ret)
  262. return ret;
  263. return !!(val & mask);
  264. }
  265. static int clk_alpha_pll_hwfsm_is_enabled(struct clk_hw *hw)
  266. {
  267. return pll_is_enabled(hw, PLL_ACTIVE_FLAG);
  268. }
  269. static int clk_alpha_pll_is_enabled(struct clk_hw *hw)
  270. {
  271. return pll_is_enabled(hw, PLL_LOCK_DET);
  272. }
  273. static int clk_alpha_pll_enable(struct clk_hw *hw)
  274. {
  275. int ret;
  276. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  277. u32 val, mask;
  278. mask = PLL_OUTCTRL | PLL_RESET_N | PLL_BYPASSNL;
  279. ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
  280. if (ret)
  281. return ret;
  282. /* If in FSM mode, just vote for it */
  283. if (val & PLL_VOTE_FSM_ENA) {
  284. ret = clk_enable_regmap(hw);
  285. if (ret)
  286. return ret;
  287. return wait_for_pll_enable_active(pll);
  288. }
  289. /* Skip if already enabled */
  290. if ((val & mask) == mask)
  291. return 0;
  292. ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
  293. PLL_BYPASSNL, PLL_BYPASSNL);
  294. if (ret)
  295. return ret;
  296. /*
  297. * H/W requires a 5us delay between disabling the bypass and
  298. * de-asserting the reset.
  299. */
  300. mb();
  301. udelay(5);
  302. ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
  303. PLL_RESET_N, PLL_RESET_N);
  304. if (ret)
  305. return ret;
  306. ret = wait_for_pll_enable_lock(pll);
  307. if (ret)
  308. return ret;
  309. ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
  310. PLL_OUTCTRL, PLL_OUTCTRL);
  311. /* Ensure that the write above goes through before returning. */
  312. mb();
  313. return ret;
  314. }
  315. static void clk_alpha_pll_disable(struct clk_hw *hw)
  316. {
  317. int ret;
  318. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  319. u32 val, mask;
  320. ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
  321. if (ret)
  322. return;
  323. /* If in FSM mode, just unvote it */
  324. if (val & PLL_VOTE_FSM_ENA) {
  325. clk_disable_regmap(hw);
  326. return;
  327. }
  328. mask = PLL_OUTCTRL;
  329. regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), mask, 0);
  330. /* Delay of 2 output clock ticks required until output is disabled */
  331. mb();
  332. udelay(1);
  333. mask = PLL_RESET_N | PLL_BYPASSNL;
  334. regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), mask, 0);
  335. }
  336. static unsigned long
  337. alpha_pll_calc_rate(u64 prate, u32 l, u32 a, u32 alpha_width)
  338. {
  339. return (prate * l) + ((prate * a) >> ALPHA_SHIFT(alpha_width));
  340. }
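/*
 * Worked example (illustrative values, assuming a 19.2 MHz reference):
 * l = 41 and a 32-bit alpha of 0xAAAAAAAB (~2/3) give
 * 19200000 * 41 + ((19200000 * 0xAAAAAAAB) >> 32) = 800000000 Hz;
 * alpha_pll_round_rate() below performs the inverse decomposition.
 */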
  341. static unsigned long
  342. alpha_pll_round_rate(unsigned long rate, unsigned long prate, u32 *l, u64 *a,
  343. u32 alpha_width)
  344. {
  345. u64 remainder;
  346. u64 quotient;
  347. quotient = rate;
  348. remainder = do_div(quotient, prate);
  349. *l = quotient;
  350. if (!remainder) {
  351. *a = 0;
  352. return rate;
  353. }
  354. /* Upper ALPHA_BITWIDTH bits of Alpha */
  355. quotient = remainder << ALPHA_SHIFT(alpha_width);
  356. remainder = do_div(quotient, prate);
  357. if (remainder)
  358. quotient++;
  359. *a = quotient;
  360. return alpha_pll_calc_rate(prate, *l, *a, alpha_width);
  361. }
  362. static const struct pll_vco *
  363. alpha_pll_find_vco(const struct clk_alpha_pll *pll, unsigned long rate)
  364. {
  365. const struct pll_vco *v = pll->vco_table;
  366. const struct pll_vco *end = v + pll->num_vco;
  367. for (; v < end; v++)
  368. if (rate >= v->min_freq && rate <= v->max_freq)
  369. return v;
  370. return NULL;
  371. }
  372. static unsigned long
  373. clk_alpha_pll_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
  374. {
  375. u32 l, low, high, ctl;
  376. u64 a = 0, prate = parent_rate;
  377. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  378. u32 alpha_width = pll_alpha_width(pll);
  379. regmap_read(pll->clkr.regmap, PLL_L_VAL(pll), &l);
  380. regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &ctl);
  381. if (ctl & PLL_ALPHA_EN) {
  382. regmap_read(pll->clkr.regmap, PLL_ALPHA_VAL(pll), &low);
  383. if (alpha_width > 32) {
  384. regmap_read(pll->clkr.regmap, PLL_ALPHA_VAL_U(pll),
  385. &high);
  386. a = (u64)high << 32 | low;
  387. } else {
  388. a = low & GENMASK(alpha_width - 1, 0);
  389. }
  390. if (alpha_width > ALPHA_BITWIDTH)
  391. a >>= alpha_width - ALPHA_BITWIDTH;
  392. }
  393. return alpha_pll_calc_rate(prate, l, a, alpha_width);
  394. }
  395. static int __clk_alpha_pll_update_latch(struct clk_alpha_pll *pll)
  396. {
  397. int ret;
  398. u32 mode;
  399. regmap_read(pll->clkr.regmap, PLL_MODE(pll), &mode);
  400. /* Latch the input to the PLL */
  401. regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), PLL_UPDATE,
  402. PLL_UPDATE);
  403. /* Wait for 2 reference cycles before checking ACK bit */
  404. udelay(1);
  405. /*
  406. * PLL will latch the new L, Alpha and freq control word.
  407. * PLL will respond by raising PLL_ACK_LATCH output when new programming
  408. * has been latched in and PLL is being updated. When
  409. * UPDATE_LOGIC_BYPASS bit is not set, PLL_UPDATE will be cleared
  410. * automatically by hardware when PLL_ACK_LATCH is asserted by PLL.
  411. */
  412. if (mode & PLL_UPDATE_BYPASS) {
  413. ret = wait_for_pll_update_ack_set(pll);
  414. if (ret)
  415. return ret;
  416. regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll), PLL_UPDATE, 0);
  417. } else {
  418. ret = wait_for_pll_update(pll);
  419. if (ret)
  420. return ret;
  421. }
  422. ret = wait_for_pll_update_ack_clear(pll);
  423. if (ret)
  424. return ret;
  425. /* Wait for PLL output to stabilize */
  426. udelay(10);
  427. return 0;
  428. }
  429. static int clk_alpha_pll_update_latch(struct clk_alpha_pll *pll,
  430. int (*is_enabled)(struct clk_hw *))
  431. {
  432. if (!is_enabled(&pll->clkr.hw) ||
  433. !(pll->flags & SUPPORTS_DYNAMIC_UPDATE))
  434. return 0;
  435. return __clk_alpha_pll_update_latch(pll);
  436. }
  437. static int __clk_alpha_pll_set_rate(struct clk_hw *hw, unsigned long rate,
  438. unsigned long prate,
  439. int (*is_enabled)(struct clk_hw *))
  440. {
  441. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  442. const struct pll_vco *vco;
  443. u32 l, alpha_width = pll_alpha_width(pll);
  444. u64 a;
  445. rate = alpha_pll_round_rate(rate, prate, &l, &a, alpha_width);
  446. vco = alpha_pll_find_vco(pll, rate);
  447. if (pll->vco_table && !vco) {
  448. pr_err("%s: alpha pll not in a valid vco range\n",
  449. clk_hw_get_name(hw));
  450. return -EINVAL;
  451. }
  452. regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
  453. if (alpha_width > ALPHA_BITWIDTH)
  454. a <<= alpha_width - ALPHA_BITWIDTH;
  455. if (alpha_width > 32)
  456. regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL_U(pll), a >> 32);
  457. regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL(pll), a);
  458. if (vco) {
  459. regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
  460. PLL_VCO_MASK << PLL_VCO_SHIFT,
  461. vco->val << PLL_VCO_SHIFT);
  462. }
  463. regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
  464. PLL_ALPHA_EN, PLL_ALPHA_EN);
  465. return clk_alpha_pll_update_latch(pll, is_enabled);
  466. }
  467. static int clk_alpha_pll_set_rate(struct clk_hw *hw, unsigned long rate,
  468. unsigned long prate)
  469. {
  470. return __clk_alpha_pll_set_rate(hw, rate, prate,
  471. clk_alpha_pll_is_enabled);
  472. }
  473. static int clk_alpha_pll_hwfsm_set_rate(struct clk_hw *hw, unsigned long rate,
  474. unsigned long prate)
  475. {
  476. return __clk_alpha_pll_set_rate(hw, rate, prate,
  477. clk_alpha_pll_hwfsm_is_enabled);
  478. }
  479. static long clk_alpha_pll_round_rate(struct clk_hw *hw, unsigned long rate,
  480. unsigned long *prate)
  481. {
  482. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  483. u32 l, alpha_width = pll_alpha_width(pll);
  484. u64 a;
  485. unsigned long min_freq, max_freq;
  486. rate = alpha_pll_round_rate(rate, *prate, &l, &a, alpha_width);
  487. if (!pll->vco_table || alpha_pll_find_vco(pll, rate))
  488. return rate;
  489. min_freq = pll->vco_table[0].min_freq;
  490. max_freq = pll->vco_table[pll->num_vco - 1].max_freq;
  491. return clamp(rate, min_freq, max_freq);
  492. }
  493. static unsigned long
  494. alpha_huayra_pll_calc_rate(u64 prate, u32 l, u32 a)
  495. {
  496. /*
  497. * a contains the 16-bit alpha_val as a two's complement number in
  498. * the range [-0.5, 0.5).
  499. */
  500. if (a >= BIT(PLL_HUAYRA_ALPHA_WIDTH - 1))
  501. l -= 1;
  502. return (prate * l) + (prate * a >> PLL_HUAYRA_ALPHA_WIDTH);
  503. }
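/*
 * Worked example (illustrative, assuming a 19.2 MHz reference): a 16-bit
 * alpha of 0xc000 is -0.25 in two's complement, so with l = 50 the code
 * above computes 19.2 MHz * 49 + 19.2 MHz * 0xc000 / 2^16 = 955.2 MHz,
 * i.e. 19.2 MHz * 49.75.
 */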
  504. static unsigned long
  505. alpha_huayra_pll_round_rate(unsigned long rate, unsigned long prate,
  506. u32 *l, u32 *a)
  507. {
  508. u64 remainder;
  509. u64 quotient;
  510. quotient = rate;
  511. remainder = do_div(quotient, prate);
  512. *l = quotient;
  513. if (!remainder) {
  514. *a = 0;
  515. return rate;
  516. }
  517. quotient = remainder << PLL_HUAYRA_ALPHA_WIDTH;
  518. remainder = do_div(quotient, prate);
  519. if (remainder)
  520. quotient++;
  521. /*
  522. * alpha_val must be a two's complement number in the range
  523. * [-0.5, 0.5), so if the quotient is >= 0.5, increment the L value,
  524. * since the alpha value will be subtracted in that case.
  525. */
  526. if (quotient >= BIT(PLL_HUAYRA_ALPHA_WIDTH - 1))
  527. *l += 1;
  528. *a = quotient;
  529. return alpha_huayra_pll_calc_rate(prate, *l, *a);
  530. }
  531. static unsigned long
  532. alpha_pll_huayra_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
  533. {
  534. u64 rate = parent_rate, tmp;
  535. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  536. u32 l, alpha = 0, ctl, alpha_m, alpha_n;
  537. regmap_read(pll->clkr.regmap, PLL_L_VAL(pll), &l);
  538. regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &ctl);
  539. if (ctl & PLL_ALPHA_EN) {
  540. regmap_read(pll->clkr.regmap, PLL_ALPHA_VAL(pll), &alpha);
  541. /*
  542. * Depending upon alpha_mode, it can be treated as M/N value or
  543. * as a two’s complement number. When alpha_mode=1,
  544. * pll_alpha_val<15:8>=M and pll_alpha_val<7:0>=N
  545. *
  546. * Fout=FIN*(L+(M/N))
  547. *
  548. * M is a signed number (-128 to 127) and N is unsigned
  549. * (0 to 255). M/N has to be within +/-0.5.
  550. *
  551. * When alpha_mode=0, it is a two’s complement number in the
  552. * range [-0.5, 0.5).
  553. *
  554. * Fout=FIN*(L+(alpha_val)/2^16)
  555. *
  556. * where alpha_val is two’s complement number.
  557. */
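/*
 * Worked example (illustrative, M/N mode with a 19.2 MHz reference):
 * alpha = 0xff05 gives M = 0xff (-1 as a signed 8-bit value) and N = 5,
 * so with l = 50 the code below computes
 * 19.2 MHz * 50 - 19.2 MHz * 1/5 = 956.16 MHz.
 */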
  558. if (!(ctl & PLL_ALPHA_MODE))
  559. return alpha_huayra_pll_calc_rate(rate, l, alpha);
  560. alpha_m = alpha >> PLL_HUAYRA_M_SHIFT & PLL_HUAYRA_M_MASK;
  561. alpha_n = alpha >> PLL_HUAYRA_N_SHIFT & PLL_HUAYRA_N_MASK;
  562. rate *= l;
  563. tmp = parent_rate;
  564. if (alpha_m >= BIT(PLL_HUAYRA_M_WIDTH - 1)) {
  565. alpha_m = BIT(PLL_HUAYRA_M_WIDTH) - alpha_m;
  566. tmp *= alpha_m;
  567. do_div(tmp, alpha_n);
  568. rate -= tmp;
  569. } else {
  570. tmp *= alpha_m;
  571. do_div(tmp, alpha_n);
  572. rate += tmp;
  573. }
  574. return rate;
  575. }
  576. return alpha_huayra_pll_calc_rate(rate, l, alpha);
  577. }
  578. static int alpha_pll_huayra_set_rate(struct clk_hw *hw, unsigned long rate,
  579. unsigned long prate)
  580. {
  581. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  582. u32 l, a, ctl, cur_alpha = 0;
  583. rate = alpha_huayra_pll_round_rate(rate, prate, &l, &a);
  584. regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &ctl);
  585. if (ctl & PLL_ALPHA_EN)
  586. regmap_read(pll->clkr.regmap, PLL_ALPHA_VAL(pll), &cur_alpha);
  587. /*
  588. * The Huayra PLL supports dynamic programming: the user can change L_VAL
  589. * without having to go through the power-on sequence.
  590. */
  591. if (clk_alpha_pll_is_enabled(hw)) {
  592. if (cur_alpha != a) {
  593. pr_err("%s: clock needs to be gated\n",
  594. clk_hw_get_name(hw));
  595. return -EBUSY;
  596. }
  597. regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
  598. /* Ensure the L value write above reaches the PLL before waiting for lock. */
  599. mb();
  600. return wait_for_pll_enable_lock(pll);
  601. }
  602. regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
  603. regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL(pll), a);
  604. if (a == 0)
  605. regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
  606. PLL_ALPHA_EN, 0x0);
  607. else
  608. regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
  609. PLL_ALPHA_EN | PLL_ALPHA_MODE, PLL_ALPHA_EN);
  610. return 0;
  611. }
  612. static long alpha_pll_huayra_round_rate(struct clk_hw *hw, unsigned long rate,
  613. unsigned long *prate)
  614. {
  615. u32 l, a;
  616. return alpha_huayra_pll_round_rate(rate, *prate, &l, &a);
  617. }
  618. static int trion_pll_is_enabled(struct clk_alpha_pll *pll,
  619. struct regmap *regmap)
  620. {
  621. u32 mode_regval, opmode_regval;
  622. int ret;
  623. ret = regmap_read(regmap, PLL_MODE(pll), &mode_regval);
  624. ret |= regmap_read(regmap, PLL_OPMODE(pll), &opmode_regval);
  625. if (ret)
  626. return 0;
  627. return ((opmode_regval & PLL_RUN) && (mode_regval & PLL_OUTCTRL));
  628. }
  629. static int clk_trion_pll_is_enabled(struct clk_hw *hw)
  630. {
  631. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  632. return trion_pll_is_enabled(pll, pll->clkr.regmap);
  633. }
  634. static int clk_trion_pll_enable(struct clk_hw *hw)
  635. {
  636. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  637. struct regmap *regmap = pll->clkr.regmap;
  638. u32 val;
  639. int ret;
  640. ret = regmap_read(regmap, PLL_MODE(pll), &val);
  641. if (ret)
  642. return ret;
  643. /* If in FSM mode, just vote for it */
  644. if (val & PLL_VOTE_FSM_ENA) {
  645. ret = clk_enable_regmap(hw);
  646. if (ret)
  647. return ret;
  648. return wait_for_pll_enable_active(pll);
  649. }
  650. /* Set operation mode to RUN */
  651. regmap_write(regmap, PLL_OPMODE(pll), PLL_RUN);
  652. ret = wait_for_pll_enable_lock(pll);
  653. if (ret)
  654. return ret;
  655. /* Enable the PLL outputs */
  656. ret = regmap_update_bits(regmap, PLL_USER_CTL(pll),
  657. PLL_OUT_MASK, PLL_OUT_MASK);
  658. if (ret)
  659. return ret;
  660. /* Enable the global PLL outputs */
  661. return regmap_update_bits(regmap, PLL_MODE(pll),
  662. PLL_OUTCTRL, PLL_OUTCTRL);
  663. }
  664. static void clk_trion_pll_disable(struct clk_hw *hw)
  665. {
  666. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  667. struct regmap *regmap = pll->clkr.regmap;
  668. u32 val;
  669. int ret;
  670. ret = regmap_read(regmap, PLL_MODE(pll), &val);
  671. if (ret)
  672. return;
  673. /* If in FSM mode, just unvote it */
  674. if (val & PLL_VOTE_FSM_ENA) {
  675. clk_disable_regmap(hw);
  676. return;
  677. }
  678. /* Disable the global PLL output */
  679. ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);
  680. if (ret)
  681. return;
  682. /* Disable the PLL outputs */
  683. ret = regmap_update_bits(regmap, PLL_USER_CTL(pll),
  684. PLL_OUT_MASK, 0);
  685. if (ret)
  686. return;
  687. /* Place the PLL mode in STANDBY */
  688. regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);
  689. regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
  690. }
  691. static unsigned long
  692. clk_trion_pll_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
  693. {
  694. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  695. u32 l, frac, alpha_width = pll_alpha_width(pll);
  696. regmap_read(pll->clkr.regmap, PLL_L_VAL(pll), &l);
  697. regmap_read(pll->clkr.regmap, PLL_ALPHA_VAL(pll), &frac);
  698. return alpha_pll_calc_rate(parent_rate, l, frac, alpha_width);
  699. }
  700. const struct clk_ops clk_alpha_pll_fixed_ops = {
  701. .enable = clk_alpha_pll_enable,
  702. .disable = clk_alpha_pll_disable,
  703. .is_enabled = clk_alpha_pll_is_enabled,
  704. .recalc_rate = clk_alpha_pll_recalc_rate,
  705. };
  706. EXPORT_SYMBOL_GPL(clk_alpha_pll_fixed_ops);
  707. const struct clk_ops clk_alpha_pll_ops = {
  708. .enable = clk_alpha_pll_enable,
  709. .disable = clk_alpha_pll_disable,
  710. .is_enabled = clk_alpha_pll_is_enabled,
  711. .recalc_rate = clk_alpha_pll_recalc_rate,
  712. .round_rate = clk_alpha_pll_round_rate,
  713. .set_rate = clk_alpha_pll_set_rate,
  714. };
  715. EXPORT_SYMBOL_GPL(clk_alpha_pll_ops);
  716. const struct clk_ops clk_alpha_pll_huayra_ops = {
  717. .enable = clk_alpha_pll_enable,
  718. .disable = clk_alpha_pll_disable,
  719. .is_enabled = clk_alpha_pll_is_enabled,
  720. .recalc_rate = alpha_pll_huayra_recalc_rate,
  721. .round_rate = alpha_pll_huayra_round_rate,
  722. .set_rate = alpha_pll_huayra_set_rate,
  723. };
  724. EXPORT_SYMBOL_GPL(clk_alpha_pll_huayra_ops);
  725. const struct clk_ops clk_alpha_pll_hwfsm_ops = {
  726. .enable = clk_alpha_pll_hwfsm_enable,
  727. .disable = clk_alpha_pll_hwfsm_disable,
  728. .is_enabled = clk_alpha_pll_hwfsm_is_enabled,
  729. .recalc_rate = clk_alpha_pll_recalc_rate,
  730. .round_rate = clk_alpha_pll_round_rate,
  731. .set_rate = clk_alpha_pll_hwfsm_set_rate,
  732. };
  733. EXPORT_SYMBOL_GPL(clk_alpha_pll_hwfsm_ops);
  734. const struct clk_ops clk_alpha_pll_fixed_trion_ops = {
  735. .enable = clk_trion_pll_enable,
  736. .disable = clk_trion_pll_disable,
  737. .is_enabled = clk_trion_pll_is_enabled,
  738. .recalc_rate = clk_trion_pll_recalc_rate,
  739. .round_rate = clk_alpha_pll_round_rate,
  740. };
  741. EXPORT_SYMBOL_GPL(clk_alpha_pll_fixed_trion_ops);
  742. static unsigned long
  743. clk_alpha_pll_postdiv_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
  744. {
  745. struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
  746. u32 ctl;
  747. regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &ctl);
  748. ctl >>= PLL_POST_DIV_SHIFT;
  749. ctl &= PLL_POST_DIV_MASK(pll);
  750. return parent_rate >> fls(ctl);
  751. }
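/*
 * Example: a POST_DIV field of 0x3 read back above gives fls(0x3) = 2,
 * i.e. parent_rate / 4, matching the {0x3, 4} entry in the table below.
 */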
  752. static const struct clk_div_table clk_alpha_div_table[] = {
  753. { 0x0, 1 },
  754. { 0x1, 2 },
  755. { 0x3, 4 },
  756. { 0x7, 8 },
  757. { 0xf, 16 },
  758. { }
  759. };
  760. static const struct clk_div_table clk_alpha_2bit_div_table[] = {
  761. { 0x0, 1 },
  762. { 0x1, 2 },
  763. { 0x3, 4 },
  764. { }
  765. };
  766. static long
  767. clk_alpha_pll_postdiv_round_rate(struct clk_hw *hw, unsigned long rate,
  768. unsigned long *prate)
  769. {
  770. struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
  771. const struct clk_div_table *table;
  772. if (pll->width == 2)
  773. table = clk_alpha_2bit_div_table;
  774. else
  775. table = clk_alpha_div_table;
  776. return divider_round_rate(hw, rate, prate, table,
  777. pll->width, CLK_DIVIDER_POWER_OF_TWO);
  778. }
  779. static long
  780. clk_alpha_pll_postdiv_round_ro_rate(struct clk_hw *hw, unsigned long rate,
  781. unsigned long *prate)
  782. {
  783. struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
  784. u32 ctl, div;
  785. regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &ctl);
  786. ctl >>= PLL_POST_DIV_SHIFT;
  787. ctl &= BIT(pll->width) - 1;
  788. div = 1 << fls(ctl);
  789. if (clk_hw_get_flags(hw) & CLK_SET_RATE_PARENT)
  790. *prate = clk_hw_round_rate(clk_hw_get_parent(hw), div * rate);
  791. return DIV_ROUND_UP_ULL((u64)*prate, div);
  792. }
  793. static int clk_alpha_pll_postdiv_set_rate(struct clk_hw *hw, unsigned long rate,
  794. unsigned long parent_rate)
  795. {
  796. struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
  797. int div;
  798. /* 16 -> 0xf, 8 -> 0x7, 4 -> 0x3, 2 -> 0x1, 1 -> 0x0 */
  799. div = DIV_ROUND_UP_ULL(parent_rate, rate) - 1;
  800. return regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
  801. PLL_POST_DIV_MASK(pll) << PLL_POST_DIV_SHIFT,
  802. div << PLL_POST_DIV_SHIFT);
  803. }
  804. const struct clk_ops clk_alpha_pll_postdiv_ops = {
  805. .recalc_rate = clk_alpha_pll_postdiv_recalc_rate,
  806. .round_rate = clk_alpha_pll_postdiv_round_rate,
  807. .set_rate = clk_alpha_pll_postdiv_set_rate,
  808. };
  809. EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_ops);
  810. const struct clk_ops clk_alpha_pll_postdiv_ro_ops = {
  811. .round_rate = clk_alpha_pll_postdiv_round_ro_rate,
  812. .recalc_rate = clk_alpha_pll_postdiv_recalc_rate,
  813. };
  814. EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_ro_ops);
  815. void clk_fabia_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
  816. const struct alpha_pll_config *config)
  817. {
  818. u32 val, mask;
  819. if (config->l)
  820. regmap_write(regmap, PLL_L_VAL(pll), config->l);
  821. if (config->alpha)
  822. regmap_write(regmap, PLL_FRAC(pll), config->alpha);
  823. if (config->config_ctl_val)
  824. regmap_write(regmap, PLL_CONFIG_CTL(pll),
  825. config->config_ctl_val);
  826. if (config->config_ctl_hi_val)
  827. regmap_write(regmap, PLL_CONFIG_CTL_U(pll),
  828. config->config_ctl_hi_val);
  829. if (config->user_ctl_val)
  830. regmap_write(regmap, PLL_USER_CTL(pll), config->user_ctl_val);
  831. if (config->user_ctl_hi_val)
  832. regmap_write(regmap, PLL_USER_CTL_U(pll),
  833. config->user_ctl_hi_val);
  834. if (config->test_ctl_val)
  835. regmap_write(regmap, PLL_TEST_CTL(pll),
  836. config->test_ctl_val);
  837. if (config->test_ctl_hi_val)
  838. regmap_write(regmap, PLL_TEST_CTL_U(pll),
  839. config->test_ctl_hi_val);
  840. if (config->post_div_mask) {
  841. mask = config->post_div_mask;
  842. val = config->post_div_val;
  843. regmap_update_bits(regmap, PLL_USER_CTL(pll), mask, val);
  844. }
  845. regmap_update_bits(regmap, PLL_MODE(pll), PLL_UPDATE_BYPASS,
  846. PLL_UPDATE_BYPASS);
  847. regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
  848. }
  849. EXPORT_SYMBOL_GPL(clk_fabia_pll_configure);
  850. static int alpha_pll_fabia_enable(struct clk_hw *hw)
  851. {
  852. int ret;
  853. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  854. u32 val, opmode_val;
  855. struct regmap *regmap = pll->clkr.regmap;
  856. ret = regmap_read(regmap, PLL_MODE(pll), &val);
  857. if (ret)
  858. return ret;
  859. /* If in FSM mode, just vote for it */
  860. if (val & PLL_VOTE_FSM_ENA) {
  861. ret = clk_enable_regmap(hw);
  862. if (ret)
  863. return ret;
  864. return wait_for_pll_enable_active(pll);
  865. }
  866. ret = regmap_read(regmap, PLL_OPMODE(pll), &opmode_val);
  867. if (ret)
  868. return ret;
  869. /* Skip if the PLL is already running */
  870. if ((opmode_val & PLL_RUN) && (val & PLL_OUTCTRL))
  871. return 0;
  872. ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);
  873. if (ret)
  874. return ret;
  875. ret = regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);
  876. if (ret)
  877. return ret;
  878. ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N,
  879. PLL_RESET_N);
  880. if (ret)
  881. return ret;
  882. ret = regmap_write(regmap, PLL_OPMODE(pll), PLL_RUN);
  883. if (ret)
  884. return ret;
  885. ret = wait_for_pll_enable_lock(pll);
  886. if (ret)
  887. return ret;
  888. ret = regmap_update_bits(regmap, PLL_USER_CTL(pll),
  889. PLL_OUT_MASK, PLL_OUT_MASK);
  890. if (ret)
  891. return ret;
  892. return regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL,
  893. PLL_OUTCTRL);
  894. }
  895. static void alpha_pll_fabia_disable(struct clk_hw *hw)
  896. {
  897. int ret;
  898. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  899. u32 val;
  900. struct regmap *regmap = pll->clkr.regmap;
  901. ret = regmap_read(regmap, PLL_MODE(pll), &val);
  902. if (ret)
  903. return;
  904. /* If in FSM mode, just unvote it */
  905. if (val & PLL_FSM_ENA) {
  906. clk_disable_regmap(hw);
  907. return;
  908. }
  909. ret = regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);
  910. if (ret)
  911. return;
  912. /* Disable main outputs */
  913. ret = regmap_update_bits(regmap, PLL_USER_CTL(pll), PLL_OUT_MASK, 0);
  914. if (ret)
  915. return;
  916. /* Place the PLL in STANDBY */
  917. regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);
  918. }
  919. static unsigned long alpha_pll_fabia_recalc_rate(struct clk_hw *hw,
  920. unsigned long parent_rate)
  921. {
  922. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  923. u32 l, frac, alpha_width = pll_alpha_width(pll);
  924. regmap_read(pll->clkr.regmap, PLL_L_VAL(pll), &l);
  925. regmap_read(pll->clkr.regmap, PLL_FRAC(pll), &frac);
  926. return alpha_pll_calc_rate(parent_rate, l, frac, alpha_width);
  927. }
  928. static int alpha_pll_fabia_set_rate(struct clk_hw *hw, unsigned long rate,
  929. unsigned long prate)
  930. {
  931. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  932. u32 l, alpha_width = pll_alpha_width(pll);
  933. u64 a;
  934. unsigned long rrate, max = rate + PLL_RATE_MARGIN;
  935. rrate = alpha_pll_round_rate(rate, prate, &l, &a, alpha_width);
  936. /*
  937. * Due to a limited number of bits for fractional rate programming, the
  938. * rounded up rate could be marginally higher than the requested rate.
  939. */
  940. if (rrate > (rate + PLL_RATE_MARGIN) || rrate < rate) {
  941. pr_err("%s: Rounded rate %lu not within range [%lu, %lu)\n",
  942. clk_hw_get_name(hw), rrate, rate, max);
  943. return -EINVAL;
  944. }
  945. regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
  946. regmap_write(pll->clkr.regmap, PLL_FRAC(pll), a);
  947. return __clk_alpha_pll_update_latch(pll);
  948. }
  949. static int alpha_pll_fabia_prepare(struct clk_hw *hw)
  950. {
  951. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  952. const struct pll_vco *vco;
  953. struct clk_hw *parent_hw;
  954. unsigned long cal_freq, rrate;
  955. u32 cal_l, val, alpha_width = pll_alpha_width(pll);
  956. const char *name = clk_hw_get_name(hw);
  957. u64 a;
  958. int ret;
  959. /* Check if calibration needs to be done i.e. PLL is in reset */
  960. ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
  961. if (ret)
  962. return ret;
  963. /* Return early if calibration is not needed. */
  964. if (val & PLL_RESET_N)
  965. return 0;
  966. vco = alpha_pll_find_vco(pll, clk_hw_get_rate(hw));
  967. if (!vco) {
  968. pr_err("%s: alpha pll not in a valid vco range\n", name);
  969. return -EINVAL;
  970. }
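/*
 * The calibration frequency is 54% of (min_freq + max_freq) of the first
 * VCO range, i.e. slightly above the middle of that band.
 */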
  971. cal_freq = DIV_ROUND_CLOSEST((pll->vco_table[0].min_freq +
  972. pll->vco_table[0].max_freq) * 54, 100);
  973. parent_hw = clk_hw_get_parent(hw);
  974. if (!parent_hw)
  975. return -EINVAL;
  976. rrate = alpha_pll_round_rate(cal_freq, clk_hw_get_rate(parent_hw),
  977. &cal_l, &a, alpha_width);
  978. /*
  979. * Due to a limited number of bits for fractional rate programming, the
  980. * rounded up rate could be marginally higher than the requested rate.
  981. */
  982. if (rrate > (cal_freq + PLL_RATE_MARGIN) || rrate < cal_freq)
  983. return -EINVAL;
  984. /* Setup PLL for calibration frequency */
  985. regmap_write(pll->clkr.regmap, PLL_CAL_L_VAL(pll), cal_l);
  986. /* Bringup the PLL at calibration frequency */
  987. ret = clk_alpha_pll_enable(hw);
  988. if (ret) {
  989. pr_err("%s: alpha pll calibration failed\n", name);
  990. return ret;
  991. }
  992. clk_alpha_pll_disable(hw);
  993. return 0;
  994. }
  995. const struct clk_ops clk_alpha_pll_fabia_ops = {
  996. .prepare = alpha_pll_fabia_prepare,
  997. .enable = alpha_pll_fabia_enable,
  998. .disable = alpha_pll_fabia_disable,
  999. .is_enabled = clk_alpha_pll_is_enabled,
  1000. .set_rate = alpha_pll_fabia_set_rate,
  1001. .recalc_rate = alpha_pll_fabia_recalc_rate,
  1002. .round_rate = clk_alpha_pll_round_rate,
  1003. };
  1004. EXPORT_SYMBOL_GPL(clk_alpha_pll_fabia_ops);
  1005. const struct clk_ops clk_alpha_pll_fixed_fabia_ops = {
  1006. .enable = alpha_pll_fabia_enable,
  1007. .disable = alpha_pll_fabia_disable,
  1008. .is_enabled = clk_alpha_pll_is_enabled,
  1009. .recalc_rate = alpha_pll_fabia_recalc_rate,
  1010. .round_rate = clk_alpha_pll_round_rate,
  1011. };
  1012. EXPORT_SYMBOL_GPL(clk_alpha_pll_fixed_fabia_ops);
  1013. static unsigned long clk_alpha_pll_postdiv_fabia_recalc_rate(struct clk_hw *hw,
  1014. unsigned long parent_rate)
  1015. {
  1016. struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
  1017. u32 i, div = 1, val;
  1018. int ret;
  1019. ret = regmap_read(pll->clkr.regmap, PLL_USER_CTL(pll), &val);
  1020. if (ret)
  1021. return ret;
  1022. val >>= pll->post_div_shift;
  1023. val &= BIT(pll->width) - 1;
  1024. for (i = 0; i < pll->num_post_div; i++) {
  1025. if (pll->post_div_table[i].val == val) {
  1026. div = pll->post_div_table[i].div;
  1027. break;
  1028. }
  1029. }
  1030. return (parent_rate / div);
  1031. }
  1032. static unsigned long
  1033. clk_trion_pll_postdiv_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
  1034. {
  1035. struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
  1036. struct regmap *regmap = pll->clkr.regmap;
  1037. u32 i, div = 1, val;
  1038. regmap_read(regmap, PLL_USER_CTL(pll), &val);
  1039. val >>= pll->post_div_shift;
  1040. val &= PLL_POST_DIV_MASK(pll);
  1041. for (i = 0; i < pll->num_post_div; i++) {
  1042. if (pll->post_div_table[i].val == val) {
  1043. div = pll->post_div_table[i].div;
  1044. break;
  1045. }
  1046. }
  1047. return (parent_rate / div);
  1048. }
  1049. static long
  1050. clk_trion_pll_postdiv_round_rate(struct clk_hw *hw, unsigned long rate,
  1051. unsigned long *prate)
  1052. {
  1053. struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
  1054. return divider_round_rate(hw, rate, prate, pll->post_div_table,
  1055. pll->width, CLK_DIVIDER_ROUND_CLOSEST);
  1056. };
  1057. static int
  1058. clk_trion_pll_postdiv_set_rate(struct clk_hw *hw, unsigned long rate,
  1059. unsigned long parent_rate)
  1060. {
  1061. struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
  1062. struct regmap *regmap = pll->clkr.regmap;
  1063. int i, val = 0, div;
  1064. div = DIV_ROUND_UP_ULL(parent_rate, rate);
  1065. for (i = 0; i < pll->num_post_div; i++) {
  1066. if (pll->post_div_table[i].div == div) {
  1067. val = pll->post_div_table[i].val;
  1068. break;
  1069. }
  1070. }
  1071. return regmap_update_bits(regmap, PLL_USER_CTL(pll),
  1072. PLL_POST_DIV_MASK(pll) << PLL_POST_DIV_SHIFT,
  1073. val << PLL_POST_DIV_SHIFT);
  1074. }
  1075. const struct clk_ops clk_alpha_pll_postdiv_trion_ops = {
  1076. .recalc_rate = clk_trion_pll_postdiv_recalc_rate,
  1077. .round_rate = clk_trion_pll_postdiv_round_rate,
  1078. .set_rate = clk_trion_pll_postdiv_set_rate,
  1079. };
  1080. EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_trion_ops);
  1081. static long clk_alpha_pll_postdiv_fabia_round_rate(struct clk_hw *hw,
  1082. unsigned long rate, unsigned long *prate)
  1083. {
  1084. struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
  1085. return divider_round_rate(hw, rate, prate, pll->post_div_table,
  1086. pll->width, CLK_DIVIDER_ROUND_CLOSEST);
  1087. }
  1088. static int clk_alpha_pll_postdiv_fabia_set_rate(struct clk_hw *hw,
  1089. unsigned long rate, unsigned long parent_rate)
  1090. {
  1091. struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
  1092. int i, val = 0, div, ret;
  1093. /*
  1094. * If the PLL is in FSM mode, then treat set_rate callback as a
  1095. * no-operation.
  1096. */
  1097. ret = regmap_read(pll->clkr.regmap, PLL_MODE(pll), &val);
  1098. if (ret)
  1099. return ret;
  1100. if (val & PLL_VOTE_FSM_ENA)
  1101. return 0;
  1102. div = DIV_ROUND_UP_ULL(parent_rate, rate);
  1103. for (i = 0; i < pll->num_post_div; i++) {
  1104. if (pll->post_div_table[i].div == div) {
  1105. val = pll->post_div_table[i].val;
  1106. break;
  1107. }
  1108. }
  1109. return regmap_update_bits(pll->clkr.regmap, PLL_USER_CTL(pll),
  1110. (BIT(pll->width) - 1) << pll->post_div_shift,
  1111. val << pll->post_div_shift);
  1112. }
  1113. const struct clk_ops clk_alpha_pll_postdiv_fabia_ops = {
  1114. .recalc_rate = clk_alpha_pll_postdiv_fabia_recalc_rate,
  1115. .round_rate = clk_alpha_pll_postdiv_fabia_round_rate,
  1116. .set_rate = clk_alpha_pll_postdiv_fabia_set_rate,
  1117. };
  1118. EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_fabia_ops);
  1119. /**
  1120. * clk_trion_pll_configure - configure the trion pll
  1121. *
  1122. * @pll: clk alpha pll
  1123. * @regmap: register map
  1124. * @config: configuration to apply for pll
  1125. */
  1126. void clk_trion_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
  1127. const struct alpha_pll_config *config)
  1128. {
  1129. if (config->l)
  1130. regmap_write(regmap, PLL_L_VAL(pll), config->l);
  1131. regmap_write(regmap, PLL_CAL_L_VAL(pll), TRION_PLL_CAL_VAL);
  1132. if (config->alpha)
  1133. regmap_write(regmap, PLL_ALPHA_VAL(pll), config->alpha);
  1134. if (config->config_ctl_val)
  1135. regmap_write(regmap, PLL_CONFIG_CTL(pll),
  1136. config->config_ctl_val);
  1137. if (config->config_ctl_hi_val)
  1138. regmap_write(regmap, PLL_CONFIG_CTL_U(pll),
  1139. config->config_ctl_hi_val);
  1140. if (config->config_ctl_hi1_val)
  1141. regmap_write(regmap, PLL_CONFIG_CTL_U1(pll),
  1142. config->config_ctl_hi1_val);
  1143. if (config->user_ctl_val)
  1144. regmap_write(regmap, PLL_USER_CTL(pll),
  1145. config->user_ctl_val);
  1146. if (config->user_ctl_hi_val)
  1147. regmap_write(regmap, PLL_USER_CTL_U(pll),
  1148. config->user_ctl_hi_val);
  1149. if (config->user_ctl_hi1_val)
  1150. regmap_write(regmap, PLL_USER_CTL_U1(pll),
  1151. config->user_ctl_hi1_val);
  1152. if (config->test_ctl_val)
  1153. regmap_write(regmap, PLL_TEST_CTL(pll),
  1154. config->test_ctl_val);
  1155. if (config->test_ctl_hi_val)
  1156. regmap_write(regmap, PLL_TEST_CTL_U(pll),
  1157. config->test_ctl_hi_val);
  1158. if (config->test_ctl_hi1_val)
  1159. regmap_write(regmap, PLL_TEST_CTL_U1(pll),
  1160. config->test_ctl_hi1_val);
  1161. regmap_update_bits(regmap, PLL_MODE(pll), PLL_UPDATE_BYPASS,
  1162. PLL_UPDATE_BYPASS);
  1163. /* Disable PLL output */
  1164. regmap_update_bits(regmap, PLL_MODE(pll), PLL_OUTCTRL, 0);
  1165. /* Set operation mode to OFF */
  1166. regmap_write(regmap, PLL_OPMODE(pll), PLL_STANDBY);
  1167. /* Place the PLL in STANDBY mode */
  1168. regmap_update_bits(regmap, PLL_MODE(pll), PLL_RESET_N, PLL_RESET_N);
  1169. }
  1170. EXPORT_SYMBOL_GPL(clk_trion_pll_configure);
  1171. /*
  1172. * The TRION PLL requires a power-on self-calibration which happens when the
  1173. * PLL comes out of reset. Calibrate in case it is not completed.
  1174. */
  1175. static int __alpha_pll_trion_prepare(struct clk_hw *hw, u32 pcal_done)
  1176. {
  1177. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  1178. u32 regval;
  1179. int ret;
  1180. /* Return early if calibration is not needed. */
  1181. regmap_read(pll->clkr.regmap, PLL_STATUS(pll), &regval);
  1182. if (regval & pcal_done)
  1183. return 0;
  1184. /* On/off to calibrate */
  1185. ret = clk_trion_pll_enable(hw);
  1186. if (!ret)
  1187. clk_trion_pll_disable(hw);
  1188. return ret;
  1189. }
  1190. static int alpha_pll_trion_prepare(struct clk_hw *hw)
  1191. {
  1192. return __alpha_pll_trion_prepare(hw, TRION_PCAL_DONE);
  1193. }
  1194. static int alpha_pll_lucid_prepare(struct clk_hw *hw)
  1195. {
  1196. return __alpha_pll_trion_prepare(hw, LUCID_PCAL_DONE);
  1197. }
  1198. static int alpha_pll_trion_set_rate(struct clk_hw *hw, unsigned long rate,
  1199. unsigned long prate)
  1200. {
  1201. struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
  1202. unsigned long rrate;
  1203. u32 regval, l, alpha_width = pll_alpha_width(pll);
  1204. u64 a;
  1205. int ret;
  1206. rrate = alpha_pll_round_rate(rate, prate, &l, &a, alpha_width);
  1207. /*
  1208. * Due to a limited number of bits for fractional rate programming, the
  1209. * rounded up rate could be marginally higher than the requested rate.
  1210. */
  1211. if (rrate > (rate + PLL_RATE_MARGIN) || rrate < rate) {
  1212. pr_err("Call set rate on the PLL with rounded rates!\n");
  1213. return -EINVAL;
  1214. }
  1215. regmap_write(pll->clkr.regmap, PLL_L_VAL(pll), l);
  1216. regmap_write(pll->clkr.regmap, PLL_ALPHA_VAL(pll), a);
  1217. /* Latch the PLL input */
  1218. ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
  1219. PLL_UPDATE, PLL_UPDATE);
  1220. if (ret)
  1221. return ret;
  1222. /* Wait for 2 reference cycles before checking the ACK bit. */
  1223. udelay(1);
  1224. regmap_read(pll->clkr.regmap, PLL_MODE(pll), &regval);
  1225. if (!(regval & ALPHA_PLL_ACK_LATCH)) {
  1226. pr_err("Lucid PLL latch failed. Output may be unstable!\n");
  1227. return -EINVAL;
  1228. }
  1229. /* Return the latch input to 0 */
  1230. ret = regmap_update_bits(pll->clkr.regmap, PLL_MODE(pll),
  1231. PLL_UPDATE, 0);
  1232. if (ret)
  1233. return ret;
  1234. if (clk_hw_is_enabled(hw)) {
  1235. ret = wait_for_pll_enable_lock(pll);
  1236. if (ret)
  1237. return ret;
  1238. }
  1239. /* Wait for PLL output to stabilize */
  1240. udelay(100);
  1241. return 0;
  1242. }
  1243. const struct clk_ops clk_alpha_pll_trion_ops = {
  1244. .prepare = alpha_pll_trion_prepare,
  1245. .enable = clk_trion_pll_enable,
  1246. .disable = clk_trion_pll_disable,
  1247. .is_enabled = clk_trion_pll_is_enabled,
  1248. .recalc_rate = clk_trion_pll_recalc_rate,
  1249. .round_rate = clk_alpha_pll_round_rate,
  1250. .set_rate = alpha_pll_trion_set_rate,
  1251. };
  1252. EXPORT_SYMBOL_GPL(clk_alpha_pll_trion_ops);
  1253. const struct clk_ops clk_alpha_pll_lucid_ops = {
  1254. .prepare = alpha_pll_lucid_prepare,
  1255. .enable = clk_trion_pll_enable,
  1256. .disable = clk_trion_pll_disable,
  1257. .is_enabled = clk_trion_pll_is_enabled,
  1258. .recalc_rate = clk_trion_pll_recalc_rate,
  1259. .round_rate = clk_alpha_pll_round_rate,
  1260. .set_rate = alpha_pll_trion_set_rate,
  1261. };
  1262. EXPORT_SYMBOL_GPL(clk_alpha_pll_lucid_ops);
  1263. const struct clk_ops clk_alpha_pll_postdiv_lucid_ops = {
  1264. .recalc_rate = clk_alpha_pll_postdiv_fabia_recalc_rate,
  1265. .round_rate = clk_alpha_pll_postdiv_fabia_round_rate,
  1266. .set_rate = clk_alpha_pll_postdiv_fabia_set_rate,
  1267. };
  1268. EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_lucid_ops);